Skip to content

Commit

Permalink
Bugfix: Preparing flow export from server artifact flows (#2606)
Browse files Browse the repository at this point in the history
Previously, the flow export did not include uploaded bulk files.

Also fixed bug in recent client search: sort by recently active before
paging to maintain stable paging behavior.
  • Loading branch information
scudette committed Apr 4, 2023
1 parent 8e2512c commit 6ffdfdb
Show file tree
Hide file tree
Showing 8 changed files with 182 additions and 27 deletions.
44 changes: 44 additions & 0 deletions file_store/test_utils/unzip.go
@@ -0,0 +1,44 @@
package test_utils

import (
"archive/zip"
"io"

config_proto "www.velocidex.com/golang/velociraptor/config/proto"
"www.velocidex.com/golang/velociraptor/file_store"
"www.velocidex.com/golang/velociraptor/file_store/api"
"www.velocidex.com/golang/velociraptor/utils"
)

// UnzipToFilestore unpacks the zip archive at zip_path into the file
// store configured by config_obj, rooted at base. Each archive member
// is written under base using its slash-separated path components.
//
// Members whose reader can not be opened are skipped; failures to
// open the archive, create an output file, or copy a member's data
// are returned as errors.
func UnzipToFilestore(
	config_obj *config_proto.Config,
	base api.FSPathSpec,
	zip_path string) error {

	reader, err := zip.OpenReader(zip_path)
	if err != nil {
		return err
	}
	defer reader.Close()

	file_store_factory := file_store.GetFileStore(config_obj)

	for _, f := range reader.File {
		components := utils.SplitComponents(f.Name)
		output_path := base.AddChild(components...).
			SetType(api.PATH_TYPE_FILESTORE_ANY)
		fd, err := file_store_factory.WriteFile(output_path)
		if err != nil {
			return err
		}

		infd, err := reader.Open(f.Name)
		if err != nil {
			// Skip unreadable members, but do not leak the
			// output file handle we already opened.
			fd.Close()
			continue
		}

		// Propagate copy failures instead of silently leaving a
		// truncated file in the file store.
		_, err = io.Copy(fd, infd)
		infd.Close()
		fd.Close()
		if err != nil {
			return err
		}
	}

	return nil
}
10 changes: 8 additions & 2 deletions file_store/uploader/uploader.go
Expand Up @@ -11,6 +11,7 @@ import (
"www.velocidex.com/golang/velociraptor/accessors"
config_proto "www.velocidex.com/golang/velociraptor/config/proto"
"www.velocidex.com/golang/velociraptor/file_store/api"
"www.velocidex.com/golang/velociraptor/file_store/path_specs"
"www.velocidex.com/golang/velociraptor/uploads"
"www.velocidex.com/golang/vfilter"
)
Expand Down Expand Up @@ -85,13 +86,18 @@ loop:
}
}

scope.Log("Uploaded %v (%v bytes)", output_path.AsClientPath(), offset)
// Return paths relative to the storage root.
relative_path := path_specs.NewUnsafeFilestorePath(store_as_name.Components...).
SetType(api.PATH_TYPE_FILESTORE_ANY)

scope.Log("Uploaded %v (%v bytes)", relative_path.AsClientPath(), offset)
return &uploads.UploadResponse{
Path: output_path.AsClientPath(),
Path: relative_path.AsClientPath(),
Size: uint64(offset),
StoredSize: uint64(offset),
Sha256: hex.EncodeToString(sha_sum.Sum(nil)),
Md5: hex.EncodeToString(md5_sum.Sum(nil)),
// Full components to the file in Components
Components: output_path.Components(),
}, nil
}
Expand Down
24 changes: 14 additions & 10 deletions services/indexing/search.go
Expand Up @@ -94,13 +94,7 @@ func (self *Indexer) searchRecents(
}()

// Return all the valid records
total_count := 0
for api_client := range resolver.Out {
total_count++
if uint64(total_count) < in.Offset {
continue
}

// Skip clients that are offline
if in.Filter == api_proto.SearchClientsRequest_ONLINE &&
now > api_client.LastSeenAt &&
Expand All @@ -109,16 +103,26 @@ func (self *Indexer) searchRecents(
}

result.Items = append(result.Items, api_client)
if uint64(len(result.Items)) > limit {
return result, nil
}
}

// Sort the children in reverse order - most recent first.
// Sort the results in reverse order - most recent first.
sort.Slice(result.Items, func(i, j int) bool {
return result.Items[i].FirstSeenAt > result.Items[j].FirstSeenAt
})

// Page the result properly
start := int(in.Offset)
if start > len(result.Items) {
result.Items = nil
return result, nil
}

end := int(in.Offset + limit)
if end > len(result.Items) {
end = len(result.Items) - 1
}
result.Items = result.Items[start:end]

return result, nil
}

Expand Down
2 changes: 1 addition & 1 deletion services/server_artifacts/server_artifacts_test.go
Expand Up @@ -238,7 +238,7 @@ sources:
log_data := test_utils.FileReadAll(self.T(), self.ConfigObj,
flow_path_manager.Log())
assert.Contains(self.T(), log_data,
"Uploaded /clients/server/collections/F.1234/uploads/test.txt")
"Uploaded /test.txt")

// Make sure the upload data is stored in the upload file.
uploads_data := test_utils.FileReadAll(self.T(), self.ConfigObj,
Expand Down
27 changes: 14 additions & 13 deletions vql/server/downloads/downloads.go
Expand Up @@ -181,8 +181,7 @@ func createDownloadFile(
return nil, errors.New("Client Id and Flow Id should be specified.")
}

hostname := services.GetHostname(
ctx, config_obj, client_id) + "-" + client_id
hostname := services.GetHostname(ctx, config_obj, client_id)
flow_path_manager := paths.NewFlowPathManager(client_id, flow_id)
download_file := flow_path_manager.GetDownloadsFile(hostname, password != "")
if name != "" {
Expand Down Expand Up @@ -318,17 +317,19 @@ func downloadFlowToZip(
}

// Copy artifact results
for _, name := range flow_details.Context.ArtifactsWithResults {
artifact_path_manager, err := artifacts.NewArtifactPathManager(ctx,
config_obj, client_id, flow_id, name)
if err != nil {
continue
}
if flow_details != nil && flow_details.Context != nil {
for _, name := range flow_details.Context.ArtifactsWithResults {
artifact_path_manager, err := artifacts.NewArtifactPathManager(ctx,
config_obj, client_id, flow_id, name)
if err != nil {
continue
}

err = copyResultSetIntoContainer(ctx, config_obj, zip_writer, format,
artifact_path_manager.Path(), prefix.AddChild("results", name))
if err != nil {
return err
err = copyResultSetIntoContainer(ctx, config_obj, zip_writer, format,
artifact_path_manager.Path(), prefix.AddChild("results", name))
if err != nil {
return err
}
}
}

Expand Down Expand Up @@ -438,7 +439,7 @@ func copyUploadFiles(
// Otherwise we need to look at the filestore
// components and derive the client's components from
// there.
} else if len(components) > 6 && components[0] == "clients" {
} else if len(components) > 4 && components[0] == "clients" {
//Remove the prefix in the file store where the files
//are stored. The uploads file in the file store
//refers to the location in the filestore where the
Expand Down
43 changes: 42 additions & 1 deletion vql/server/downloads/downloads_test.go
Expand Up @@ -60,6 +60,47 @@ sources:
launcher.SetFlowIdForTests("F.1234")
}

// TestExportCollectionServerArtifact imports a previously collected
// server artifact flow into the file store, exports it through the
// create_flow_download() VQL function, and compares the resulting zip
// contents against a golden file.
func (self *TestSuite) TestExportCollectionServerArtifact() {
	import_file_path, err := filepath.Abs("fixtures/export_server_artifact.zip")
	assert.NoError(self.T(), err)

	// Seed the file store with the fixture flow. Failing to unpack
	// the fixture must fail the test, not surface later as a
	// confusing empty export.
	err = test_utils.UnzipToFilestore(self.ConfigObj,
		path_specs.NewUnsafeFilestorePath("clients", "server", "collections"),
		import_file_path)
	assert.NoError(self.T(), err)

	// test_utils.GetMemoryFileStore(self.T(), self.ConfigObj).Debug()
	manager, _ := services.GetRepositoryManager(self.ConfigObj)
	builder := services.ScopeBuilder{
		Config:     self.ConfigObj,
		ACLManager: acl_managers.NullACLManager{},
		Logger:     logging.NewPlainLogger(self.ConfigObj, &logging.FrontendComponent),
		Env:        ordereddict.NewDict(),
	}

	ctx := self.Ctx
	scope := manager.BuildScope(builder)

	// Now create the download export. The plugin returns a filestore
	// pathspec to the created download file.
	result := (&CreateFlowDownload{}).Call(ctx, scope,
		ordereddict.NewDict().
			Set("client_id", "server").
			Set("flow_id", "F.CGLR6OS84DP00").
			Set("wait", true).
			Set("expand_sparse", false).
			Set("name", "Test"))

	// A zip file was created.
	path_spec, ok := result.(path_specs.FSPathSpec)
	assert.True(self.T(), ok)

	file_details, err := openZipFile(self.ConfigObj, scope, path_spec)
	assert.NoError(self.T(), err)

	goldie.Assert(self.T(), "TestExportCollectionServerArtifact",
		json.MustMarshalIndent(file_details))
}

// First import a collection from a zip file to create a
// collection. Then we export the collection back into zip files to
// test the export functionality.
Expand All @@ -73,7 +114,7 @@ func (self *TestSuite) TestExportCollection() {
Env: ordereddict.NewDict(),
}

ctx := context.Background()
ctx := self.Ctx
scope := manager.BuildScope(builder)

import_file_path, err := filepath.Abs("fixtures/export.zip")
Expand Down
@@ -0,0 +1,59 @@
{
"client_info.json": [
{}
],
"logs.json": [
{
"Timestamp": 1680585571852796,
"Level": "DEFAULT",
"message": "Running query on behalf of user mic\n"
},
{
"Timestamp": 1680585571852838,
"Level": "DEFAULT",
"message": "\u003cgreen\u003eStarting\u003c/\u003e query execution.\n"
},
{
"Timestamp": 1680585571855305,
"Level": "DEFAULT",
"message": "Uploaded /File0.txt.json (37 bytes)\n"
},
{
"Timestamp": 1680585571856397,
"Level": "DEFAULT",
"message": "Uploaded /File1.txt.json (37 bytes)\n"
},
{
"Timestamp": 1680585571857412,
"Level": "DEBUG",
"message": "Query Stats: {\"RowsScanned\":2,\"PluginsCalled\":1,\"FunctionsCalled\":4,\"ProtocolSearch\":0,\"ScopeCopy\":6}\n"
}
],
"uploads/File0.txt": "This is a long test with many letters",
"uploads/File1.txt": "This is a long test with many letters",
"uploads.json.index": "\u0000\u0000\u0000\u0000\u0000\u0001\u0000\u0000\ufffd\u0000\u0000\u0000\u0000\u0001\u0000\u0000",
"uploads.json": [
{
"Timestamp": 1680585571,
"started": "2023-04-04 05:19:31.855375283 +0000 UTC",
"vfs_path": "/File0.txt.json",
"_Components": [
"uploads",
"File0.txt"
],
"file_size": 37,
"uploaded_size": 37
},
{
"Timestamp": 1680585571,
"started": "2023-04-04 05:19:31.856444777 +0000 UTC",
"vfs_path": "/File1.txt.json",
"_Components": [
"uploads",
"File1.txt"
],
"file_size": 37,
"uploaded_size": 37
}
]
}
Binary file not shown.

0 comments on commit 6ffdfdb

Please sign in to comment.