Skip to content

Commit

Permalink
Frame adjustment (#1420)
Browse files Browse the repository at this point in the history
* adjust frame for CSV matching

* add frame filename matching

* update tests
  • Loading branch information
BryonLewis authored May 24, 2024
1 parent 347abbb commit 336304c
Show file tree
Hide file tree
Showing 16 changed files with 106 additions and 65 deletions.
2 changes: 1 addition & 1 deletion client/dive-common/apispec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ interface Api {
getTileURL?(itemId: string, x: number, y: number, level: number, query: Record<string, any>):
string;
importAnnotationFile(id: string, path: string, file?: File,
additive?: boolean, additivePrepend?: string, set?: string): Promise<boolean>;
additive?: boolean, additivePrepend?: string, set?: string): Promise<boolean | string[]>;
}
const ApiSymbol = Symbol('api');

Expand Down
15 changes: 13 additions & 2 deletions client/dive-common/components/ImportAnnotations.vue
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ export default defineComponent({
if (!ret.canceled) {
menuOpen.value = false;
const path = ret.filePaths[0];
let importFile = false;
let importFile: boolean | string[] = false;
processing.value = true;
const set = currentSet.value === 'default' ? undefined : currentSet.value;
if (ret.fileList?.length) {
Expand All @@ -74,6 +74,17 @@ export default defineComponent({
set,
);
}
console.log(importFile);

Check warning on line 77 in client/dive-common/components/ImportAnnotations.vue

View workflow job for this annotation

GitHub Actions / Client Tests (web)

Unexpected console statement

Check warning on line 77 in client/dive-common/components/ImportAnnotations.vue

View workflow job for this annotation

GitHub Actions / Client Tests (electron)

Unexpected console statement
if (Array.isArray(importFile) && importFile.length) {
const text = ['There were warnings when importing. While the data imported properly please double check your annotations',
'Below is a list of information that can help with debugging',
].concat(importFile as string[]);
await prompt({
title: 'Import Warnings',
text,
positiveButton: 'OK',
});
}
if (importFile) {
processing.value = false;
Expand Down Expand Up @@ -188,7 +199,7 @@ export default defineComponent({
outlined
small
>
<template v-slot:selection="{ attrs, item, selected }">
<template #selection="{ attrs, item, selected }">
<v-chip
v-bind="attrs"
small
Expand Down
2 changes: 1 addition & 1 deletion client/platform/desktop/backend/cli.ts
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ function updater(update: DesktopJobUpdate) {

async function parseViameFile(file: string) {
const data = await parseFile(file);
stdout.write(JSON.stringify(data));
stdout.write(JSON.stringify(data[0]));
}

async function parseJsonFile(filepath: string, metapath: string) {
Expand Down
3 changes: 3 additions & 0 deletions client/platform/desktop/backend/ipcService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,9 @@ export default function register() {
id, path, additive, additivePrepend,
}: { id: string; path: string; additive: boolean; additivePrepend: string }) => {
const ret = await common.dataFileImport(settings.get(), id, path, additive, additivePrepend);
console.log(ret.warnings);
if (ret.warnings.length)
return ret.warnings;
return ret;
});

Expand Down
29 changes: 18 additions & 11 deletions client/platform/desktop/backend/native/common.ts
Original file line number Diff line number Diff line change
Expand Up @@ -578,13 +578,14 @@ async function _ingestFilePath(
imageMap?: Map<string, number>,
additive = false,
additivePrepend = '',
): Promise<(DatasetMetaMutable & { fps?: number }) | null> {
): Promise<[(DatasetMetaMutable & { fps?: number }), string[]] | null> {
if (!fs.existsSync(path)) {
return null;
}
if (fs.statSync(path).size === 0) {
return null;
}
let warnings: string[] = [];
// Make a copy of the file in aux
const projectInfo = getProjectDir(settings, datasetId);
const newPath = npath.join(projectInfo.auxDirAbsPath, `imported_${npath.basename(path)}`);
Expand Down Expand Up @@ -612,9 +613,10 @@ async function _ingestFilePath(
} else if (CsvFileName.test(path)) {
// VIAME CSV File
const data = await viameSerializers.parseFile(path, imageMap);
annotations.tracks = data.tracks;
annotations.groups = data.groups;
meta.fps = data.fps;
annotations.tracks = data[0].tracks;
annotations.groups = data[0].groups;
meta.fps = data[0].fps;
[, warnings] = data;
} else if (YAMLFileName.test(path)) {
annotations = await kpf.parse([path]);
}
Expand Down Expand Up @@ -652,7 +654,7 @@ async function _ingestFilePath(
await _saveSerialized(settings, datasetId, annotations, true);
}

return meta;
return [meta, warnings];
}

/**
Expand All @@ -679,17 +681,20 @@ async function ingestDataFiles(
): Promise<{
processedFiles: string[];
meta: DatasetMetaMutable & { fps?: number };
warnings: string[];
}> {
const processedFiles = []; // which files were processed to generate the detections
const meta = {};

let outwarnings: string[] = [];
for (let i = 0; i < absPaths.length; i += 1) {
const path = absPaths[i];
// eslint-disable-next-line no-await-in-loop
const newMeta = await _ingestFilePath(
const results = await _ingestFilePath(
settings, datasetId, path, imageMap, additive, additivePrepend,
);
if (newMeta !== null) {
if (results !== null) {
const [newMeta, warnings] = results;
outwarnings = warnings;
merge(meta, newMeta);
processedFiles.push(path);
}
Expand All @@ -702,15 +707,17 @@ async function ingestDataFiles(
const path = cameraAndPath[i][1];
const cameraDatasetId = `${datasetId}/${cameraName}`;
// eslint-disable-next-line no-await-in-loop
const newMeta = await _ingestFilePath(settings, cameraDatasetId, path, imageMap);
if (newMeta !== null) {
const results = await _ingestFilePath(settings, cameraDatasetId, path, imageMap);
if (results !== null) {
const [newMeta, warnings] = results;
outwarnings = outwarnings.concat(warnings);
merge(meta, newMeta);
processedFiles.push(path);
}
}
}

return { processedFiles, meta };
return { processedFiles, meta, warnings: outwarnings };
}
/**
* Need to take the trained pipeline if it exists and place it in the DIVE_Pipelines folder
Expand Down
6 changes: 3 additions & 3 deletions client/platform/desktop/backend/serializers/viame.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -239,9 +239,9 @@ describe('VIAME Python Compatibility Check', () => {
const trackArray = Object.values(trackData);
// eslint-disable-next-line no-await-in-loop
const results = await parse(csvStream);
expect(Object.values(results.tracks)).toEqual(trackArray);
expect(Object.values(results[0].tracks)).toEqual(trackArray);
// eslint-disable-next-line no-await-in-loop
const attData = processTrackAttributes(Object.values(results.tracks));
const attData = processTrackAttributes(Object.values(results[0].tracks));
expect(testAttributes).toEqual(attData.attributes);
}
});
Expand Down Expand Up @@ -314,7 +314,7 @@ describe('Test Image Filenames', () => {
} else {
// eslint-disable-next-line no-await-in-loop
const result = await parseFile(testPath, imageMap);
expect(Object.values(result.tracks).length).toBeGreaterThan(0);
expect(Object.values(result[0].tracks).length).toBeGreaterThan(0);
}
}
});
Expand Down
27 changes: 17 additions & 10 deletions client/platform/desktop/backend/serializers/viame.ts
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ function _parseFeature(row: string[]) {
};
}

async function parse(input: Readable, imageMap?: Map<string, number>): Promise<AnnotationFileData> {
async function parse(input: Readable, imageMap?: Map<string, number>): Promise<[AnnotationFileData, string[]]> {
const parser = csvparser({
delimiter: ',',
// comment lines may not have the correct number of columns
Expand All @@ -253,8 +253,9 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
const foundImages: {image: string; frame: number; csvFrame: number}[] = [];
let error: Error | undefined;
let multiFrameTracks = false;
const warnings: string[] = [];

return new Promise<AnnotationFileData>((resolve, reject) => {
return new Promise<[AnnotationFileData, string[]]>((resolve, reject) => {
pipeline([input, parser], (err) => {
// undefined err indicates successful exit
if (err !== undefined) {
Expand All @@ -280,15 +281,17 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
}
const k = i + 1;
if (k < filteredImages.length) {
if (filteredImages[i].csvFrame + 1 !== filteredImages[k].csvFrame || filteredImages[i].frame + 1 !== filteredImages[k].frame) {
const itemDifference = foundImages[k].csvFrame - filteredImages[i].csvFrame;
if (
foundImages[i].csvFrame + itemDifference !== filteredImages[k].csvFrame || filteredImages[i].frame + itemDifference !== filteredImages[k].frame) {
// We have misaligned image sequences so we error out
error = new Error(`A subsampling of images were used with the CSV but they were not sequential\n
warnings.push(`A subsampling of images were used with the CSV but they were not sequential\n
${filteredImages[i].csvFrame + 1} !== ${filteredImages[k].csvFrame} || ${filteredImages[i].frame + 1} !== ${filteredImages[k].frame}\n
image1: ${filteredImages[i].image} image2: ${filteredImages[k].image} - these should be sequential in the CSV
\n`);
}
}
frameMapper[filteredImages[i].csvFrame] = i;
frameMapper[filteredImages[i].csvFrame] = filteredImages[i].frame;
minFrame = Math.min(minFrame, filteredImages[i].csvFrame);
maxFrame = Math.max(maxFrame, filteredImages[i].csvFrame);
}
Expand Down Expand Up @@ -347,7 +350,9 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
if (k < foundImages.length) {
if (foundImages[i].csvFrame > foundImages[k].csvFrame || foundImages[i].frame > foundImages[k].frame) {
// We have misaligned video sequences so we error out
error = new Error('Images were provided in an unexpected order and dataset contains multi-frame tracks.');
warnings.push(`Images were provided in an unexpected order and dataset contains multi-frame tracks.\n
image${i}: frame: ${foundImages[i].frame} csvFrame: ${foundImages[i].csvFrame}
image${k}: frame: ${foundImages[k].frame} csvFrame: ${foundImages[k].csvFrame}`);
}
}
}
Expand All @@ -357,7 +362,7 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
if (error !== undefined) {
reject(error);
}
resolve({ tracks, groups: {}, fps });
resolve([{ tracks, groups: {}, fps }, warnings]);
});
parser.on('readable', () => {
let record: string[];
Expand Down Expand Up @@ -407,8 +412,10 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
});
if (rowInfo.frame < maxFeatureFrame) {
// trackId was already in dataMap, and frame is out of order
error = new Error(
'annotations were provided in an unexpected order and dataset contains multi-frame tracks',
warnings.push(
`annotations were provided in an unexpected order and dataset contains multi-frame tracks:
id: ${rowInfo.id} filename: ${rowInfo.filename} frame: ${rowInfo.frame}
maxFeatureFrame: ${maxFeatureFrame}`,
);
// eslint-disable-next-line no-continue
continue;
Expand Down Expand Up @@ -446,7 +453,7 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
}

async function parseFile(path: string, imageMap?: Map<string, number>):
Promise<AnnotationFileData> {
Promise<[AnnotationFileData, string[]]> {
const stream = fs.createReadStream(path);
return parse(stream, imageMap);
}
Expand Down
2 changes: 1 addition & 1 deletion client/platform/desktop/frontend/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ function importMultiCam(args: MultiCamImportArgs):
}

// eslint-disable-next-line @typescript-eslint/no-unused-vars
function importAnnotationFile(id: string, path: string, _htmlFile = undefined, additive = false, additivePrepend = ''): Promise<boolean> {
function importAnnotationFile(id: string, path: string, _htmlFile = undefined, additive = false, additivePrepend = ''): Promise<boolean | string[]> {
return ipcRenderer.invoke('import-annotation', {
id, path, additive, additivePrepend,
});
Expand Down
7 changes: 6 additions & 1 deletion client/platform/web-girder/api/dataset.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ function makeViameFolder({
);
}

async function importAnnotationFile(parentId: string, path: string, file?: HTMLFile, additive = false, additivePrepend = '', set: string | undefined = undefined) {
async function importAnnotationFile(parentId: string, path: string, file?: HTMLFile, additive = false, additivePrepend = '', set: string | undefined = undefined): Promise<boolean | string[]> {
if (file === undefined) {
return false;
}
Expand All @@ -111,6 +111,11 @@ async function importAnnotationFile(parentId: string, path: string, file?: HTMLF
});
if (uploadResponse.status === 200) {
const final = await postProcess(parentId, true, false, additive, additivePrepend, set);
if (final.data.length > 1) {
const warnings = final.data[1];
return warnings;
}

return final.status === 200;
}
}
Expand Down
3 changes: 2 additions & 1 deletion client/platform/web-girder/api/rpc.service.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import girderRest from 'platform/web-girder/plugins/girder';
import type { GirderModel } from '@girder/components/src';
import { Pipe } from 'dive-common/apispec';

function postProcess(folderId: string, skipJobs = false, skipTranscoding = false, additive = false, additivePrepend = '', set: string | undefined = undefined) {
return girderRest.post(`dive_rpc/postprocess/${folderId}`, null, {
return girderRest.post<[GirderModel, string[]]>(`dive_rpc/postprocess/${folderId}`, null, {
params: {
skipJobs, skipTranscoding, additive, additivePrepend, set,
},
Expand Down
9 changes: 5 additions & 4 deletions client/platform/web-girder/views/Export.vue
Original file line number Diff line number Diff line change
Expand Up @@ -319,15 +319,16 @@ export default defineComponent({
class="mt-2"
:disabled="!exportUrls.exportDetectionsUrl"
@click="doExport({ url: exportUrls
&& exportUrls.exportDetectionsUrlTrackJSON })" >
&& exportUrls.exportDetectionsUrlTrackJSON })"
>
<span
v-if="exportUrls.exportDetectionsUrl"
>DIVE TrackJSON</span>
<span
v-else
>detections unavailable</span>
</v-btn>
<!-- <v-btn
</v-btn>
<!-- <v-btn
depressed
block
:disabled="!exportUrls.exportDetectionsUrl"
Expand All @@ -336,7 +337,7 @@ export default defineComponent({
<span v-if="exportUrls.exportDetectionsUrl">annotations</span>
<span v-else>detections unavailable</span>
</v-btn> -->
</v-col>
</v-col>
</v-row>
</v-card-actions>

Expand Down
19 changes: 11 additions & 8 deletions server/dive_server/crud_rpc.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,7 +288,7 @@ def run_training(
def _get_data_by_type(
file: types.GirderModel,
image_map: Optional[Dict[str, int]] = None,
) -> Optional[GetDataReturnType]:
) -> Optional[Tuple[GetDataReturnType, Optional[List[str]]]]:
"""
Given an arbitrary Girder file model, figure out what kind of file it is and
parse it appropriately.
Expand Down Expand Up @@ -325,10 +325,10 @@ def _get_data_by_type(

# Parse the file as the now known type
if as_type == crud.FileType.VIAME_CSV:
converted, attributes = viame.load_csv_as_tracks_and_attributes(
converted, attributes, warnings = viame.load_csv_as_tracks_and_attributes(
file_string.splitlines(), image_map
)
return {'annotations': converted, 'meta': None, 'attributes': attributes, 'type': as_type}
return {'annotations': converted, 'meta': None, 'attributes': attributes, 'type': as_type}, warnings
if as_type == crud.FileType.MEVA_KPF:
converted, attributes = kpf.convert(kpf.load(file_string))
return {'annotations': converted, 'meta': None, 'attributes': attributes, 'type': as_type}
Expand Down Expand Up @@ -374,6 +374,7 @@ def process_items(
folder,
user,
)
aggregate_warnings = []
for item in unprocessed_items:
file: Optional[types.GirderModel] = next(Item().childFiles(item), None)
if file is None:
Expand All @@ -383,7 +384,9 @@ def process_items(
image_map = None
if fromMeta(folder, constants.TypeMarker) == 'image-sequence':
image_map = crud.valid_image_names_dict(crud.valid_images(folder, user))
results = _get_data_by_type(file, image_map=image_map)
results, warnings = _get_data_by_type(file, image_map=image_map)
if warnings:
aggregate_warnings += warnings
except Exception as e:
Item().remove(item)
raise RestException(f'{file["name"]} was not a supported file type: {e}') from e
Expand Down Expand Up @@ -414,7 +417,7 @@ def process_items(
crud.saveImportAttributes(folder, results['attributes'], user)
if results['meta']:
crud_dataset.update_metadata(folder, results['meta'], False)

return aggregate_warnings

def postprocess(
user: types.GirderUserModel,
Expand All @@ -424,7 +427,7 @@ def postprocess(
additive=False,
additivePrepend='',
set='',
) -> types.GirderModel:
) -> Tuple[types.GirderModel, Optional[List[str]]]:
"""
Post-processing to be run after media/annotation import
Expand Down Expand Up @@ -539,8 +542,8 @@ def postprocess(

Folder().save(dsFolder)

process_items(dsFolder, user, additive, additivePrepend, set)
return dsFolder
aggregate_warnings = process_items(dsFolder, user, additive, additivePrepend, set)
return dsFolder, aggregate_warnings


def convert_large_image(
Expand Down
Loading

0 comments on commit 336304c

Please sign in to comment.