+
@@ -58,13 +57,9 @@
- {% autopaginate data %}
-
- {% for item in data %}
- {% include "dashboard/task.html" %}
- {% endfor %}
+ {% paginate %}
+
-
@@ -113,14 +108,6 @@
-
-
-
-
-
-
-
-
@@ -180,7 +167,7 @@
-
-
-
+
@@ -189,14 +176,16 @@
-
- * Licensed under the MIT License
- */
-/*
- * x10.js v0.1.3
- * Web worker wrapper with simple interface
- *
- * Copyright (c) 2013-2015, Hakan Bilgin
- * Licensed under the MIT License
- */
-
-(function(window, undefined) {
- //'use strict';
-
- var x10 = {
- init: function() {
- return this;
- },
- work_handler: function(event) {
- var args = Array.prototype.slice.call(event.data, 1),
- func = event.data[0],
- ret = tree[func].apply(tree, args);
-
- // return process finish
- postMessage([func, ret]);
- },
- setup: function(tree) {
- var url = window.URL || window.webkitURL,
- script = 'var tree = {'+ this.parse(tree).join(',') +'};',
- blob = new Blob([script + 'self.addEventListener("message", '+ this.work_handler.toString() +', false);'],
- {type: 'text/javascript'}),
- worker = new Worker(url.createObjectURL(blob));
-
- // thread pipe
- worker.onmessage = function(event) {
- var args = Array.prototype.slice.call(event.data, 1),
- func = event.data[0];
- x10.observer.emit('x10:'+ func, args);
- };
-
- return worker;
- },
- call_handler: function(func, worker) {
- return function() {
- var args = Array.prototype.slice.call(arguments, 0, -1),
- callback = arguments[arguments.length-1];
-
- // add method name
- args.unshift(func);
-
- // listen for 'done'
- x10.observer.on('x10:'+ func, function(event) {
- callback(event.detail[0]);
- });
-
- // start worker
- worker.postMessage(args);
- };
- },
- compile: function(hash) {
- var worker = this.setup(typeof(hash) === 'function' ? {func: hash} : hash),
- obj = {},
- fn;
- // create return object
- if (typeof(hash) === 'function') {
- obj.func = this.call_handler('func', worker);
- return obj.func;
- } else {
- for (fn in hash) {
- obj[fn] = this.call_handler(fn, worker);
- }
- return obj;
- }
- },
- parse: function(tree, isArray) {
- var hash = [],
- key,
- val,
- v;
-
- for (key in tree) {
- v = tree[key];
- // handle null
- if (v === null) {
- hash.push(key +':null');
- continue;
- }
- // handle undefined
- if (v === undefined) {
- hash.push(key +':undefined');
- continue;
- }
- switch (v.constructor) {
- case Date: val = 'new Date('+ v.valueOf() +')'; break;
- case Object: val = '{'+ this.parse(v).join(',') +'}'; break;
- case Array: val = '['+ this.parse(v, true).join(',') +']'; break;
- case String: val = '"'+ v.replace(/"/g, '\\"') +'"'; break;
- case RegExp:
- case Function: val = v.toString(); break;
- default: val = v;
- }
- if (isArray) hash.push(val);
- else hash.push(key +':'+ val);
- }
- return hash;
- },
- // simple event emitter
- observer: (function() {
- var stack = {};
-
- return {
- on: function(type, fn) {
- if (!stack[type]) {
- stack[type] = [];
- }
- stack[type].unshift(fn);
- },
- off: function(type, fn) {
- if (!stack[type]) return;
- var i = stack[type].indexOf(fn);
- stack[type].splice(i,1);
- },
- emit: function(type, detail) {
- if (!stack[type]) return;
- var event = {
- type : type,
- detail : detail,
- isCanceled : false,
- cancelBubble : function() {
- this.isCanceled = true;
- }
- },
- len = stack[type].length;
- while(len--) {
- if (event.isCanceled) return;
- stack[type][len](event);
- }
- }
- };
- })()
- };
-
- if (typeof module === "undefined") {
- // publish x10
- window.x10 = x10.init();
- } else {
- module.exports = x10.init();
- }
-
-})(this);
-
-
-(function(window, module, undefined) {
- 'use strict';
-
- var Defiant = {
- is_ie : /(msie|trident)/i.test(navigator.userAgent),
- is_safari : /safari/i.test(navigator.userAgent),
- env : 'production',
- xml_decl : '',
- namespace : 'xmlns:d="defiant-namespace"',
- tabsize : 4,
- render: function(template, data) {
- var processor = new XSLTProcessor(),
- span = document.createElement('span'),
- opt = {match: '/'},
- tmpltXpath,
- scripts,
- temp,
- sorter;
- // handle arguments
- switch (typeof(template)) {
- case 'object':
- this.extend(opt, template);
- if (!opt.data) opt.data = data;
- break;
- case 'string':
- opt.template = template;
- opt.data = data;
- break;
- default:
- throw 'error';
- }
- opt.data = JSON.toXML(opt.data);
- tmpltXpath = '//xsl:template[@name="'+ opt.template +'"]';
-
- if (!this.xsl_template) this.gatherTemplates();
-
- if (opt.sorter) {
- sorter = this.node.selectSingleNode(this.xsl_template, tmpltXpath +'//xsl:for-each//xsl:sort');
- if (sorter) {
- if (opt.sorter.order) sorter.setAttribute('order', opt.sorter.order);
- if (opt.sorter.select) sorter.setAttribute('select', opt.sorter.select);
- sorter.setAttribute('data-type', opt.sorter.type || 'text');
- }
- }
-
- temp = this.node.selectSingleNode(this.xsl_template, tmpltXpath);
- temp.setAttribute('match', opt.match);
- processor.importStylesheet(this.xsl_template);
- span.appendChild(processor.transformToFragment(opt.data, document));
- temp.removeAttribute('match');
-
- if (this.is_safari) {
- scripts = span.getElementsByTagName('script');
- for (var i=0, il=scripts.length; i'+ str.replace(/defiant:(\w+)/g, '$1') +'');
- },
- getSnapshot: function(data, callback) {
- return JSON.toXML(data, callback || true);
- },
- xmlFromString: function(str) {
- var parser,
- doc;
- str = str.replace(/>\s{1,}<');
- if (str.trim().match(/<\?xml/) === null) {
- str = this.xml_decl + str;
- }
- if ( 'ActiveXObject' in window ) {
- doc = new ActiveXObject('Msxml2.DOMDocument');
- doc.loadXML(str);
- doc.setProperty('SelectionNamespaces', this.namespace);
- if (str.indexOf('xsl:stylesheet') === -1) {
- doc.setProperty('SelectionLanguage', 'XPath');
- }
- } else {
- parser = new DOMParser();
- doc = parser.parseFromString(str, 'text/xml');
- }
- return doc;
- },
- extend: function(src, dest) {
- for (var content in dest) {
- if (!src[content] || typeof(dest[content]) !== 'object') {
- src[content] = dest[content];
- } else {
- this.extend(src[content], dest[content]);
- }
- }
- return src;
- },
- node: {}
- };
-
- // Export
- window.Defiant = module.exports = Defiant;
-
-})(
- typeof window !== 'undefined' ? window : {},
- typeof module !== 'undefined' ? module : {}
-);
-
-
-if (typeof(XSLTProcessor) === 'undefined') {
-
- // emulating XSLT Processor (enough to be used in defiant)
- var XSLTProcessor = function() {};
- XSLTProcessor.prototype = {
- importStylesheet: function(xsldoc) {
- this.xsldoc = xsldoc;
- },
- transformToFragment: function(data, doc) {
- var str = data.transformNode(this.xsldoc),
- span = document.createElement('span');
- span.innerHTML = str;
- return span;
- }
- };
-
-} else if (typeof(XSLTProcessor) !== 'function' && !XSLTProcessor) {
-
- // throw error
- throw 'XSLTProcessor transformNode not implemented';
-
-}
-
-
-// extending STRING
-if (!String.prototype.fill) {
- String.prototype.fill = function(i,c) {
- var str = this;
- c = c || ' ';
- for (; str.length/,
- rx_constructor : /<(.+?)( d:contr=".*?")>/,
- rx_namespace : / xmlns\:d="defiant\-namespace"/,
- rx_data : /(<.+?>)(.*?)(<\/d:data>)/i,
- rx_function : /function (\w+)/i,
- namespace : 'xmlns:d="defiant-namespace"',
- to_xml_str: function(tree) {
- return {
- str: this.hash_to_xml(null, tree),
- map: this.map
- };
- },
- hash_to_xml: function(name, tree, array_child) {
- var is_array = tree.constructor === Array,
- self = this,
- elem = [],
- attr = [],
- key,
- val,
- val_is_array,
- type,
- is_attr,
- cname,
- constr,
- cnName,
- i,
- il,
- fn = function(key, tree) {
- val = tree[key];
- if (val === null || val === undefined || val.toString() === 'NaN') val = null;
-
- is_attr = key.slice(0,1) === '@';
- cname = array_child ? name : key;
- if (cname == +cname && tree.constructor !== Object) cname = 'd:item';
- if (val === null) {
- constr = null;
- cnName = false;
- } else {
- constr = val.constructor;
- cnName = constr.toString().match(self.rx_function)[1];
- }
-
- if (is_attr) {
- attr.push( cname.slice(1) +'="'+ self.escape_xml(val) +'"' );
- if (cnName !== 'String') attr.push( 'd:'+ cname.slice(1) +'="'+ cnName +'"' );
- } else if (val === null) {
- elem.push( self.scalar_to_xml( cname, val ) );
- } else {
- switch (constr) {
- case Function:
- // if constructor is function, then it's not a JSON structure
- throw 'JSON data should not contain functions. Please check your structure.';
- /* falls through */
- case Object:
- elem.push( self.hash_to_xml( cname, val ) );
- break;
- case Array:
- if (key === cname) {
- val_is_array = val.constructor === Array;
- if (val_is_array) {
- i = val.length;
- while (i--) {
- if (val[i] === null || !val[i] || val[i].constructor === Array) val_is_array = true;
- if (!val_is_array && val[i].constructor === Object) val_is_array = true;
- }
- }
- elem.push( self.scalar_to_xml( cname, val, val_is_array ) );
- break;
- }
- /* falls through */
- case String:
- if (typeof(val) === 'string') {
- val = val.toString().replace(/\&/g, '&')
- .replace(/\r|\n/g, '
');
- }
- if (cname === '#text') {
- // prepare map
- self.map.push(tree);
- attr.push('d:mi="'+ self.map.length +'"');
- attr.push('d:constr="'+ cnName +'"');
- elem.push( self.escape_xml(val) );
- break;
- }
- /* falls through */
- case Number:
- case Boolean:
- if (cname === '#text' && cnName !== 'String') {
- // prepare map
- self.map.push(tree);
- attr.push('d:mi="'+ self.map.length +'"');
- attr.push('d:constr="'+ cnName +'"');
- elem.push( self.escape_xml(val) );
- break;
- }
- elem.push( self.scalar_to_xml( cname, val ) );
- break;
- }
- }
- };
- if (tree.constructor === Array) {
- i = 0;
- il = tree.length;
- for (; i'+ elem.join('') +''+ name +'>' : '/>' );
- },
- scalar_to_xml: function(name, val, override) {
- var attr = '',
- text,
- constr,
- cnName;
-
- // check whether the nodename is valid
- if (name.match(this.rx_validate_name) === null) {
- attr += ' d:name="'+ name +'"';
- name = 'd:name';
- override = false;
- }
- if (val === null || val.toString() === 'NaN') val = null;
- if (val === null) return '<'+ name +' d:constr="null"/>';
- if (val.length === 1 && val.constructor === Array && !val[0]) {
- return '<'+ name +' d:constr="null" d:type="ArrayItem"/>';
- }
- if (val.length === 1 && val[0].constructor === Object) {
-
- text = this.hash_to_xml(false, val[0]);
-
- var a1 = text.match(this.rx_node),
- a2 = text.match(this.rx_constructor);
- a1 = (a1 !== null)? a1[2]
- .replace(this.rx_namespace, '')
- .replace(/>/, '')
- .replace(/"\/$/, '"') : '';
- a2 = (a2 !== null)? a2[2] : '';
-
- text = text.match(this.rx_data);
- text = (text !== null)? text[2] : '';
-
- return '<'+ name + a1 +' '+ a2 +' d:type="ArrayItem">'+ text +''+ name +'>';
- } else if (val.length === 0 && val.constructor === Array) {
- return '<'+ name +' d:constr="Array"/>';
- }
- // else
- if (override) {
- return this.hash_to_xml( name, val, true );
- }
-
- constr = val.constructor;
- cnName = constr.toString().match(this.rx_function)[1];
- text = (constr === Array) ? this.hash_to_xml( 'd:item', val, true )
- : this.escape_xml(val);
-
- attr += ' d:constr="'+ cnName +'"';
- // prepare map
- this.map.push(val);
- attr += ' d:mi="'+ this.map.length +'"';
-
- return (name === '#text') ? this.escape_xml(val) : '<'+ name + attr +'>'+ text +''+ name +'>';
- },
- escape_xml: function(text) {
- return String(text) .replace(//g, '>')
- .replace(/"/g, '"')
- .replace(/ /g, ' ');
- }
- },
- processed,
- doc,
- task;
- // depending on request
- switch (typeof callback) {
- case 'function':
- // compile interpreter with 'x10.js'
- task = x10.compile(interpreter);
-
- // parse in a dedicated thread
- task.to_xml_str(tree, function(processed) {
- // snapshot distinctly improves performance
- callback({
- doc: Defiant.xmlFromString(processed.str),
- src: tree,
- map: processed.map
- });
- });
- return;
- case 'boolean':
- processed = interpreter.to_xml_str.call(interpreter, tree);
- // return snapshot
- return {
- doc: Defiant.xmlFromString(processed.str),
- src: tree,
- map: processed.map
- };
- default:
- processed = interpreter.to_xml_str.call(interpreter, tree);
- doc = Defiant.xmlFromString(processed.str);
-
- this.search.map = processed.map;
- return doc;
- }
- };
-}
-
-
-if (!JSON.search) {
- JSON.search = function(tree, xpath, single) {
- 'use strict';
-
- var isSnapshot = tree.doc && tree.doc.nodeType,
- doc = isSnapshot ? tree.doc : JSON.toXML(tree),
- map = isSnapshot ? tree.map : this.search.map,
- src = isSnapshot ? tree.src : tree,
- xres = Defiant.node[ single ? 'selectSingleNode' : 'selectNodes' ](doc, xpath.xTransform()),
- ret = [],
- mapIndex,
- i;
-
- if (single) xres = [xres];
- i = xres.length;
-
- while (i--) {
- switch(xres[i].nodeType) {
- case 2:
- case 3:
- ret.unshift( xres[i].nodeValue );
- break;
- default:
- mapIndex = +xres[i].getAttribute('d:mi');
- //if (map[mapIndex-1] !== false) {
- ret.unshift( map[mapIndex-1] );
- //}
- }
- }
-
- // if environment = development, add search tracing
- if (Defiant.env === 'development') {
- this.trace = JSON.mtrace(src, ret, xres);
- }
-
- return ret;
- };
-}
-
-if (!JSON.mtrace) {
- JSON.mtrace = function(root, hits, xres) {
- 'use strict';
-
- var win = window,
- stringify = JSON.stringify,
- sroot = stringify( root, null, '\t' ).replace(/\t/g, ''),
- trace = [],
- i = 0,
- il = xres.length,
- od = il ? xres[i].ownerDocument.documentElement : false,
- map = this.search.map,
- hstr,
- cConstr,
- fIndex = 0,
- mIndex,
- lStart,
- lEnd;
-
- for (; i 0)? xI[0] : null;
- } else {
- return XNode.selectSingleNode(XPath);
- }
-};
-
-
-Defiant.node.prettyPrint = function(node) {
- var root = Defiant,
- tabs = root.tabsize,
- decl = root.xml_decl.toLowerCase(),
- ser,
- xstr;
- if (root.is_ie) {
- xstr = node.xml;
- } else {
- ser = new XMLSerializer();
- xstr = ser.serializeToString(node);
- }
- if (root.env !== 'development') {
- // if environment is not development, remove defiant related info
- xstr = xstr.replace(/ \w+\:d=".*?"| d\:\w+=".*?"/g, '');
- }
- var str = xstr.trim().replace(/(>)\s*(<)(\/*)/g, '$1\n$2$3'),
- lines = str.split('\n'),
- indent = -1,
- i = 0,
- il = lines.length,
- start,
- end;
- for (; i/g) !== null;
- //start = lines[i].match(/<[^\/]+>/g) !== null;
- end = lines[i].match(/<\/[\w\:]+>/g) !== null;
- if (lines[i].match(/<.*?\/>/g) !== null) start = end = true;
- if (start) indent++;
- lines[i] = String().fill(indent, '\t') + lines[i];
- if (start && end) indent--;
- if (!start && end) indent--;
- }
- return lines.join('\n').replace(/\t/g, String().fill(tabs, ' '));
-};
-
-
-Defiant.node.toJSON = function(xnode, stringify) {
- 'use strict';
-
- var interpret = function(leaf) {
- var obj = {},
- win = window,
- attr,
- type,
- item,
- cname,
- cConstr,
- cval,
- text,
- i, il, a;
-
- switch (leaf.nodeType) {
- case 1:
- cConstr = leaf.getAttribute('d:constr');
- if (cConstr === 'Array') obj = [];
- else if (cConstr === 'String' && leaf.textContent === '') obj = '';
-
- attr = leaf.attributes;
- i = 0;
- il = attr.length;
- for (; i
+ * License GNU AGPLv3
+ */if(function(window,module){"use strict";var defiant={is_ie:/(msie|trident)/i.test(navigator.userAgent),is_safari:/safari/i.test(navigator.userAgent),env:"production",xml_decl:'',namespace:'xmlns:d="defiant-namespace"',tabsize:4,snapshots:{},node:{},renderXml:function(e,t){var n=new window.XSLTProcessor,r=document.createElement("span"),a='//xsl:template[@name="'+e+'"]',s=this.node.selectSingleNode(this.xsl_template,a);return(s=this.node.selectSingleNode(this.xsl_template,a)).setAttribute("match","/"),n.importStylesheet(this.xsl_template),r.appendChild(n.transformToFragment(t,document)),s.removeAttribute("match"),r.innerHTML},render:function(e,t){var n,r,a,s,o=new window.XSLTProcessor,i=document.createElement("span"),l={match:"/"};switch(typeof e){case"object":this.extend(l,e),l.data||(l.data=t);break;case"string":l.template=e,l.data=t;break;default:throw"error"}if(l.data=l.data.nodeType?l.data:defiant.json.toXML(l.data),n='//xsl:template[@name="'+l.template+'"]',this.xsl_template||this.gatherTemplates(),l.sorter&&(s=this.node.selectSingleNode(this.xsl_template,n+"//xsl:for-each//xsl:sort"))&&(l.sorter.order&&s.setAttribute("order",l.sorter.order),l.sorter.select&&s.setAttribute("select",l.sorter.select),s.setAttribute("data-type",l.sorter.type||"text")),(a=this.node.selectSingleNode(this.xsl_template,n)).setAttribute("match",l.match),o.importStylesheet(this.xsl_template),i.appendChild(o.transformToFragment(l.data,document)),a.removeAttribute("match"),this.is_safari)for(var c=0,d=(r=i.getElementsByTagName("script")).length;c"+t.replace(/defiant:(\w+)/g,"$1")+"")},registerTemplate:function(e){this.xsl_template=this.xmlFromString('"+e.replace(/defiant:(\w+)/g,"$1")+" ")},getSnapshot:function(e,t){return this.json.toXML(e,t||!0)},createSnapshot:function(e,t){var n=this,r="snapshot_"+Date.now();this.json.toXML(e,function(e){n.snapshots[r]=e,t(r)})},getFacets:function(e,t){var 
n,r,a,s,o,i,l=e.constructor===String&&"snapshot_"===e.slice(0,9)?this.snapshots[e].doc:defiant.json.toXML(e),c=l.cloneNode(!0),d={},u={},p=0,h=function(e){var t=e.childNodes.length;switch(e.nodeType){case 1:t>=p&&(p=t,r=e);case 9:e.childNodes.map(function(e){return h(e)})}};for(i in h(l),r.childNodes.map(function(e){u[e.nodeName]||(u[e.nodeName]=1),u[e.nodeName]++}),p=0,u)p<=u[i]&&(p=u[i],o=i);return this.createFacetTemplate(t),s=defiant.node.selectSingleNode(c,'//*[@d:mi="'+r.getAttribute("d:mi")+'"]'),defiant.node.selectNodes(c,'//*[@d:mi="'+r.getAttribute("d:mi")+'"]/'+o).map(function(e){return e.parentNode.removeChild(e)}),a=defiant.node.selectNodes(l,'//*[@d:mi="'+r.getAttribute("d:mi")+'"]/'+o),n=a.length-1,a.map(function(e,t){if(s.appendChild(e.cloneNode(!0)),t%50==49||t===n){var a=defiant.render("facets",c).replace(/\n|\t/g,"").replace(/"": 0,?/g,"").replace(/,\}/g,"}"),i=JSON.parse(a);d=defiant.concatFacet(i,d),defiant.node.selectNodes(c,'//*[@d:mi="'+r.getAttribute("d:mi")+'"]/'+o).map(function(e){return e.parentNode.removeChild(e)})}}),d},createFacetTemplate:function(e){var t,n,r=[],a=[];for(n in e)r.push(' '),a.push('"'+n+'": {" ": '+', }'.replace(/\n|\t/g,""));t=r.join("")+'{'+a.join(",")+"} ",this.registerTemplate(t)},xmlFromString:function(e){var t;return null===(e=e.replace(/>\s{1,}<")).trim().match(/<\?xml/)&&(e=this.xml_decl+e),"ActiveXObject"in window?((t=new ActiveXObject("Msxml2.DOMDocument")).loadXML(e),t.setProperty("SelectionNamespaces",this.namespace),-1===e.indexOf("xsl:stylesheet")&&t.setProperty("SelectionLanguage","XPath")):t=(new DOMParser).parseFromString(e,"text/xml"),t},concatFacet:function(e,t){for(var n in t)e[n]&&"object"==typeof t[n]?this.concatFacet(e[n],t[n]):e[n]=(e[n]||0)+t[n];return e},extend:function(e,t){for(var n in t)e[n]&&"object"==typeof t[n]?this.extend(e[n],t[n]):e[n]=t[n];return e},node:{selectNodes:function(e,t){if(e.evaluate){for(var 
n=e.createNSResolver(e.documentElement),r=e.evaluate(t,e,n,XPathResult.ORDERED_NODE_SNAPSHOT_TYPE,null),a=[],s=0,o=r.snapshotLength;s0?n[0]:null}return e.selectSingleNode(t)},prettyPrint:function(e){var t,n=defiant,r=n.tabsize,a=n.xml_decl.toLowerCase();t=n.is_ie?e.xml:(new XMLSerializer).serializeToString(e),"development"!==n.env&&(t=t.replace(/ \w+\:d=".*?"| d\:\w+=".*?"/g,""));for(var s,o,i=t.trim().replace(/(>)\s*(<)(\/*)/g,"$1\n$2$3").split("\n"),l=-1,c=0,d=i.length;c/g),o=null!==i[c].match(/<\/[\w\:]+>/g),null!==i[c].match(/<.*?\/>/g)&&(s=o=!0),s&&l++,i[c]=String().fill(l,"\t")+i[c],s&&o&&l--,!s&&o&&l--);return i.join("\n").replace(/\t/g,String().fill(r," "))},toJSON:function(e,t){var n=function(e){var t,r,a,s,o,i,l,c,d,u,p={},h=window;switch(e.nodeType){case 1:for("Array"===(o=e.getAttribute("d:constr"))?p=[]:"String"===o&&""===e.textContent&&(p=""),c=0,d=(t=e.attributes).length;c/,rx_constructor:/<(.+?)( d:contr=".*?")>/,rx_namespace:/ xmlns\:d="defiant\-namespace"/,rx_data:/(<.+?>)(.*?)(<\/d:data>)/i,rx_function:/function (\w+)/i,namespace:'xmlns:d="defiant-namespace"',to_xml_str:function(e){return{str:this.hash_to_xml(null,e),map:this.map}},hash_to_xml:function(e,t,n){var r,a,s,o,i,l,c,d,u,p=t.constructor===Array,h=this,m=[],f=[],g=function(t,r){if(null!==(a=r[t])&&void 0!==a&&"NaN"!==a.toString()||(a=null),o="@"===t.slice(0,1),(i=n?e:t)==+i&&r.constructor!==Object&&(i="d:item"),null===a?(l=null,c=!1):(l=a.constructor,c=l.toString().match(h.rx_function)[1]),o)f.push(i.slice(1)+'="'+h.escape_xml(a)+'"'),"String"!==c&&f.push("d:"+i.slice(1)+'="'+c+'"');else if(null===a)m.push(h.scalar_to_xml(i,a));else switch(l){case Function:throw"JSON data should not contain functions. 
Please check your structure.";case Object:m.push(h.hash_to_xml(i,a));break;case Array:if(t===i){if(s=a.constructor===Array)for(d=a.length;d--;)null!==a[d]&&a[d]&&a[d].constructor!==Array||(s=!0),s||a[d].constructor!==Object||(s=!0);m.push(h.scalar_to_xml(i,a,s));break}case String:if("string"==typeof a&&(a=a.toString().replace(/\&/g,"&").replace(/\r|\n/g,"
")),"#text"===i){h.map.push(r),f.push('d:mi="'+h.map.length+'"'),f.push('d:constr="'+c+'"'),m.push(h.escape_xml(a));break}case Number:case Boolean:if("#text"===i&&"String"!==c){h.map.push(r),f.push('d:mi="'+h.map.length+'"'),f.push('d:constr="'+c+'"'),m.push(h.escape_xml(a));break}m.push(h.scalar_to_xml(i,a))}};if(t.constructor===Array)for(d=0,u=t.length;d"+m.join("")+""+e+">":"/>"))},scalar_to_xml:function(e,t,n){var r,a,s,o="";if(null===e.match(this.rx_validate_name)&&(o+=' d:name="'+e+'"',e="d:name",n=!1),null!==t&&"NaN"!==t.toString()||(t=null),null===t)return"<"+e+' d:constr="null"/>';if(1===t.length&&t.constructor===Array&&!t[0])return"<"+e+' d:constr="null" d:type="ArrayItem"/>';if(1===t.length&&t[0].constructor===Object){var i=(r=this.hash_to_xml(!1,t[0])).match(this.rx_node),l=r.match(this.rx_constructor);return"<"+e+(i=null!==i?i[2].replace(this.rx_namespace,"").replace(/>/,"").replace(/"\/$/,'"'):"")+" "+(l=null!==l?l[2]:"")+' d:type="ArrayItem">'+(r=null!==(r=r.match(this.rx_data))?r[2]:"")+""+e+">"}return 0===t.length&&t.constructor===Array?"<"+e+' d:constr="Array"/>':n?this.hash_to_xml(e,t,!0):(s=(a=t.constructor).toString().match(this.rx_function)[1],r=a===Array?this.hash_to_xml("d:item",t,!0):this.escape_xml(t),o+=' d:constr="'+s+'"',this.map.push(t),o+=' d:mi="'+this.map.length+'"',"#text"===e?this.escape_xml(t):"<"+e+o+">"+r+""+e+">")},escape_xml:function(e){return String(e).replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/ /g," ")}},toXML:function(e,t){var n,r,a=defiant.json.interpreter;switch(typeof t){case"function":return void defiant.compiled.to_xml_str(e,function(n){t({doc:defiant.xmlFromString(n.str),src:e,map:n.map})});case"boolean":return n=a.to_xml_str.call(a,e),{doc:defiant.xmlFromString(n.str),src:e,map:n.map};default:return 
n=a.to_xml_str.call(a,e),r=defiant.xmlFromString(n.str),this.search.map=n.map,r}},search:function(e,t,n){e.constructor===String&&"snapshot_"===e.slice(0,9)&&defiant.snapshots[e]&&(e=defiant.snapshots[e]);var r,a,s=defiant.json,o=e.doc&&e.doc.nodeType,i=o?e.doc:s.toXML(e),l=o?e.map:s.search.map,c=o?e.src:e,d=defiant.node[n?"selectSingleNode":"selectNodes"](i,t.xTransform()),u=[];for(n&&(d=[d]),a=d.length;a--;)switch(d[a].nodeType){case 2:case 3:u.unshift(d[a].nodeValue);break;default:r=+d[a].getAttribute("d:mi"),u.unshift(l[r-1])}return"development"===defiant.env&&(u.trace=s.matchTrace(c,u,d)),u},matchTrace:function(e,t,n){var r=[],a=0,s=window,o=defiant.node.toJSON,i=function(e){return JSON.stringify(e,null,"\t").replace(/\t/g,"")},l=i(e);return n.map(function(e,c){var d,u,p,h,m,f,g,x=0;switch(e.nodeType){case 2:d=n[c].ownerElement?n[c].ownerElement.getAttribute("d:"+n[c].nodeName):"String",h=s[d](t[c]),m='"@'+n[c].nodeName+'": '+h,f=l.indexOf(m,a);break;case 3:d=n[c].parentNode.getAttribute("d:constr"),h=s[d](t[c]),m='"'+n[c].parentNode.nodeName+'": '+("Number"===m?h:'"'+h+'"'),f=l.indexOf(m,a);break;default:d=e.getAttribute("d:constr"),["String","Number"].indexOf(d)>-1?(u=o(n[c].parentNode),p=i(u),h=s[d](t[c]),m='"'+n[c].nodeName+'": '+("Number"===d?h:'"'+h+'"'),f=l.indexOf(p,a)+p.indexOf(m)):(m=i(t[c]),f=l.indexOf(m),x=m.split("\n").length-1)}a=f+1,g=l.slice(0,f).split("\n").length,r.push([g,x])}),r}}},x10={id:1,work_handler:function(e){var t=Array.prototype.slice.call(e.data,2),n=e.data[0],r=e.data[1],a=tree[n].apply(tree,t);a.map=JSON.parse(JSON.stringify(a.map)),postMessage([r,n,a])},setup:function(e){var t=window.URL||window.webkitURL,n="var tree = {"+this.parse(e).join(",")+"};",r=new Blob([n+'self.addEventListener("message", '+this.work_handler.toString()+", false);"],{type:"text/javascript"}),a=new Worker(t.createObjectURL(r));return a.onmessage=function(e){var 
t=Array.prototype.slice.call(e.data,2),n=e.data[0],r=e.data[1];x10.observer.emit("x10:"+r+n,t),x10.observer.off("x10:"+r+n)},a},call_handler:function(e,t){return function(){var n=Array.prototype.slice.call(arguments,0,-1),r=arguments[arguments.length-1],a=x10.id++;n.unshift(a),n.unshift(e),x10.observer.on("x10:"+e+a,function(e){r(e.detail[0])}),t.postMessage(n)}},compile:function(e){var t,n=this.setup("function"==typeof e?{func:e}:e),r={};if("function"==typeof e)return r.func=this.call_handler("func",n),r.func;for(t in e)r[t]=this.call_handler(t,n);return r},parse:function(e,t){var n,r,a,s=[];for(n in e)if(null!==(a=e[n]))if(void 0!==a){switch(a.constructor){case Date:r="new Date("+a.valueOf()+")";break;case Object:r="{"+this.parse(a).join(",")+"}";break;case Array:r="["+this.parse(a,!0).join(",")+"]";break;case String:r='"'+a.replace(/"/g,'\\"')+'"';break;case RegExp:case Function:r=a.toString();break;default:r=a}t?s.push(r):s.push(n+":"+r)}else s.push(n+":undefined");else s.push(n+":null");return s},observer:(stack={},{on:function(e,t){stack[e]||(stack[e]=[]),stack[e].unshift(t)},off:function(e,t){if(stack[e]){var n=stack[e].indexOf(t);stack[e].splice(n,1)}},emit:function(e,t){if(stack[e])for(var n={type:e,detail:t,isCanceled:!1,cancelBubble:function(){this.isCanceled=!0}},r=stack[e].length;r--;){if(n.isCanceled)return;stack[e][r](n)}}})},stack;String.prototype.fill||(String.prototype.fill=function(e,t){var n=this;for(t=t||" ";n.length im_w || ytl > im_h || xbr > im_w || ybr > im_h) {
- let message = `Incorrect bb found in annotation file: xtl=${xtl} ytl=${ytl} xbr=${xbr} ybr=${ybr}. `;
- message += `Box out of range: ${im_w}x${im_h}`;
+ if (xtl < 0 || ytl < 0 || xbr < 0 || ybr < 0
+ || xtl > imWidth || ytl > imHeight || xbr > imWidth || ybr > imHeight) {
+ const message = `Incorrect bb found in annotation file: xtl=${xtl} `
+ + `ytl=${ytl} xbr=${xbr} ybr=${ybr}. `
+ + `Box out of range: ${imWidth}x${imHeight}`;
throw Error(message);
}
if (this._flipped) {
- let _xtl = im_w - xbr;
- let _xbr = im_w - xtl;
- let _ytl = im_h - ybr;
- let _ybr = im_h - ytl;
- xtl = _xtl;
- ytl = _ytl;
- xbr = _xbr;
- ybr = _ybr;
+ [xtl, ytl, xbr, ybr] = [
+ imWidth - xbr,
+ imWidth - xtl,
+ imHeight - ybr,
+ imHeight - ytl,
+ ];
}
- let occluded = +box.getAttribute('occluded');
- let z_order = box.getAttribute('z_order') || '0';
- return [xtl, ytl, xbr, ybr, occluded, +z_order];
+ const occluded = box.getAttribute('occluded');
+ const zOrder = box.getAttribute('z_order') || '0';
+ return [[xtl, ytl, xbr, ybr], +occluded, +zOrder];
}
_getPolyPosition(shape, frame) {
- frame = Math.min(frame - this._startFrame, this._im_meta['original_size'].length - 1);
- let im_w = this._im_meta['original_size'][frame].width;
- let im_h = this._im_meta['original_size'][frame].height;
+ frame = Math.min(frame - this._startFrame, this._im_meta.length - 1);
+ const imWidth = this._im_meta[frame].width;
+ const imHeight = this._im_meta[frame].height;
let points = shape.getAttribute('points').split(';').join(' ');
points = PolyShapeModel.convertStringToNumberArray(points);
- for (let point of points) {
- if (point.x < 0 || point.y < 0 || point.x > im_w || point.y > im_h) {
- let message = `Incorrect point found in annotation file x=${point.x} y=${point.y}. `;
- message += `Point out of range ${im_w}x${im_h}`;
+ for (const point of points) {
+ if (point.x < 0 || point.y < 0 || point.x > imWidth || point.y > imHeight) {
+ const message = `Incorrect point found in annotation file x=${point.x} `
+ + `y=${point.y}. Point out of range ${imWidth}x${imHeight}`;
throw Error(message);
}
if (this._flipped) {
- point.x = im_w - point.x;
- point.y = im_h - point.y;
+ point.x = imWidth - point.x;
+ point.y = imHeight - point.y;
}
}
- points = PolyShapeModel.convertNumberArrayToString(points);
- let occluded = +shape.getAttribute('occluded');
- let z_order = shape.getAttribute('z_order') || '0';
- return [points, occluded, +z_order];
+ points = points.reduce((acc, el) => {
+ acc.push(el.x, el.y);
+ return acc;
+ }, []);
+
+ const occluded = shape.getAttribute('occluded');
+ const zOrder = shape.getAttribute('z_order') || '0';
+ return [points, +occluded, +zOrder];
}
_getAttribute(labelId, attrTag) {
- let name = attrTag.getAttribute('name');
- let attrId = this._labelsInfo.attrIdOf(labelId, name);
+ const name = attrTag.getAttribute('name');
+ const attrId = this._labelsInfo.attrIdOf(labelId, name);
if (attrId === null) {
- throw Error('An unknown attribute found in the annotation file: ' + name);
+ throw Error(`An unknown attribute found in the annotation file: ${name}`);
}
- let attrInfo = this._labelsInfo.attrInfo(attrId);
- let value = this._labelsInfo.strToValues(attrInfo.type, attrTag.textContent)[0];
+ const attrInfo = this._labelsInfo.attrInfo(attrId);
+ const value = LabelsInfo.normalize(attrInfo.type, attrTag.textContent);
if (['select', 'radio'].includes(attrInfo.type) && !attrInfo.values.includes(value)) {
- throw Error('Incorrect attribute value found for "' + name + '" attribute: ' + value);
- }
- else if (attrInfo.type === 'number') {
- if (isNaN(+value)) {
- throw Error('Incorrect attribute value found for "' + name + '" attribute: ' + value + '. Value must be a number.');
- }
- else {
- let min = +attrInfo.values[0];
- let max = +attrInfo.values[1];
+ throw Error(`Incorrect attribute value found for "${name}" + attribute: "${value}"`);
+ } else if (attrInfo.type === 'number') {
+ if (Number.isNaN(+value)) {
+ throw Error(`Incorrect attribute value found for "${name}" attribute: "${value}". Value must be a number.`);
+ } else {
+ const min = +attrInfo.values[0];
+ const max = +attrInfo.values[1];
if (+value < min || +value > max) {
- throw Error('Number attribute value out of range for "' + name +'" attribute: ' + value);
+ throw Error(`Number attribute value out of range for "${name}" attribute: "${value}"`);
}
}
}
@@ -115,46 +114,48 @@ class AnnotationParser {
}
_getAttributeList(shape, labelId) {
- let attributeDict = {};
- let attributes = shape.getElementsByTagName('attribute');
- for (let attribute of attributes ) {
- let [id, value] = this._getAttribute(labelId, attribute);
+ const attributeDict = {};
+ const attributes = shape.getElementsByTagName('attribute');
+ for (const attribute of attributes) {
+ const [id, value] = this._getAttribute(labelId, attribute);
attributeDict[id] = value;
}
- let attributeList = [];
- for (let attrId in attributeDict) {
- attributeList.push({
- id: attrId,
- value: attributeDict[attrId],
- });
+ const attributeList = [];
+ for (const attrId in attributeDict) {
+ if (Object.prototype.hasOwnProperty.call(attributeDict, attrId)) {
+ attributeList.push({
+ spec_id: attrId,
+ value: attributeDict[attrId],
+ });
+ }
}
return attributeList;
}
- _getShapeFromPath(shape_type, tracks) {
- let result = [];
- for (let track of tracks) {
- let label = track.getAttribute('label');
- let group_id = track.getAttribute('group_id') || '0';
- let labelId = this._labelsInfo.labelIdOf(label);
+ _getShapeFromPath(shapeType, tracks) {
+ const result = [];
+ for (const track of tracks) {
+ const label = track.getAttribute('label');
+ const group = track.getAttribute('group_id') || '0';
+ const labelId = this._labelsInfo.labelIdOf(label);
if (labelId === null) {
throw Error(`An unknown label found in the annotation file: ${label}`);
}
- let shapes = Array.from(track.getElementsByTagName(shape_type));
- shapes.sort((a,b) => +a.getAttribute('frame') - + b.getAttribute('frame'));
+ const shapes = Array.from(track.getElementsByTagName(shapeType));
+ shapes.sort((a, b) => +a.getAttribute('frame') - +b.getAttribute('frame'));
while (shapes.length && +shapes[0].getAttribute('outside')) {
shapes.shift();
}
if (shapes.length === 2) {
- if (shapes[1].getAttribute('frame') - shapes[0].getAttribute('frame') === 1 &&
- !+shapes[0].getAttribute('outside') && +shapes[1].getAttribute('outside')) {
+ if (shapes[1].getAttribute('frame') - shapes[0].getAttribute('frame') === 1
+ && !+shapes[0].getAttribute('outside') && +shapes[1].getAttribute('outside')) {
shapes[0].setAttribute('label', label);
- shapes[0].setAttribute('group_id', group_id);
+ shapes[0].setAttribute('group_id', group);
result.push(shapes[0]);
}
}
@@ -164,87 +165,93 @@ class AnnotationParser {
}
_parseAnnotationData(xml) {
- let data = {
+ const data = {
boxes: [],
polygons: [],
polylines: [],
- points: []
+ points: [],
};
- let tracks = xml.getElementsByTagName('track');
- let parsed = {
- boxes: this._getShapeFromPath('box', tracks),
- polygons: this._getShapeFromPath('polygon', tracks),
- polylines: this._getShapeFromPath('polyline', tracks),
+ const tracks = xml.getElementsByTagName('track');
+ const parsed = {
+ box: this._getShapeFromPath('box', tracks),
+ polygon: this._getShapeFromPath('polygon', tracks),
+ polyline: this._getShapeFromPath('polyline', tracks),
points: this._getShapeFromPath('points', tracks),
};
+ const shapeTarget = {
+ box: 'boxes',
+ polygon: 'polygons',
+ polyline: 'polylines',
+ points: 'points',
+ };
- let images = xml.getElementsByTagName('image');
- for (let image of images) {
- let frame = image.getAttribute('id');
+ const images = xml.getElementsByTagName('image');
+ for (const image of images) {
+ const frame = image.getAttribute('id');
- for (let box of image.getElementsByTagName('box')) {
+ for (const box of image.getElementsByTagName('box')) {
box.setAttribute('frame', frame);
- parsed.boxes.push(box);
+ parsed.box.push(box);
}
- for (let polygon of image.getElementsByTagName('polygon')) {
+ for (const polygon of image.getElementsByTagName('polygon')) {
polygon.setAttribute('frame', frame);
- parsed.polygons.push(polygon);
+ parsed.polygon.push(polygon);
}
- for (let polyline of image.getElementsByTagName('polyline')) {
+ for (const polyline of image.getElementsByTagName('polyline')) {
polyline.setAttribute('frame', frame);
- parsed.polylines.push(polyline);
+ parsed.polyline.push(polyline);
}
- for (let points of image.getElementsByTagName('points')) {
+ for (const points of image.getElementsByTagName('points')) {
points.setAttribute('frame', frame);
parsed.points.push(points);
}
}
- for (let shape_type in parsed) {
- for (let shape of parsed[shape_type]) {
- let frame = +shape.getAttribute('frame');
- if (frame < this._startFrame || frame > this._stopFrame) continue;
+ for (const shapeType in parsed) {
+ if (Object.prototype.hasOwnProperty.call(parsed, shapeType)) {
+ for (const shape of parsed[shapeType]) {
+ const frame = +shape.getAttribute('frame');
+ if (frame < this._startFrame || frame > this._stopFrame) {
+ continue;
+ }
- let labelId = this._labelsInfo.labelIdOf(shape.getAttribute('label'));
- let groupId = shape.getAttribute('group_id') || "0";
- if (labelId === null) {
- throw Error('An unknown label found in the annotation file: ' + shape.getAttribute('label'));
- }
+ const labelId = this._labelsInfo.labelIdOf(shape.getAttribute('label'));
+ const group = shape.getAttribute('group_id') || '0';
+ if (labelId === null) {
+ throw Error(`An unknown label found in the annotation file: "${shape.getAttribute('label')}"`);
+ }
- let attributeList = this._getAttributeList(shape, labelId);
-
- if (shape_type === 'boxes') {
- let [xtl, ytl, xbr, ybr, occluded, z_order] = this._getBoxPosition(shape, frame);
- data.boxes.push({
- label_id: labelId,
- group_id: +groupId,
- frame: frame,
- occluded: occluded,
- xtl: xtl,
- ytl: ytl,
- xbr: xbr,
- ybr: ybr,
- z_order: z_order,
- attributes: attributeList,
- id: this._idGen.next(),
- });
- }
- else {
- let [points, occluded, z_order] = this._getPolyPosition(shape, frame);
- data[shape_type].push({
- label_id: labelId,
- group_id: +groupId,
- frame: frame,
- points: points,
- occluded: occluded,
- z_order: z_order,
- attributes: attributeList,
- id: this._idGen.next(),
- });
+ const attributeList = this._getAttributeList(shape, labelId);
+
+ if (shapeType === 'box') {
+ const [points, occluded, zOrder] = this._getBoxPosition(shape, frame);
+ data[shapeTarget[shapeType]].push({
+ label_id: labelId,
+ group: +group,
+ attributes: attributeList,
+ type: 'rectangle',
+ z_order: zOrder,
+ frame,
+ occluded,
+ points,
+ });
+ } else {
+ const [points, occluded, zOrder] = this._getPolyPosition(shape, frame);
+ data[shapeTarget[shapeType]].push({
+ label_id: labelId,
+ group: +group,
+ attributes: attributeList,
+ type: shapeType,
+ z_order: zOrder,
+ frame,
+ points,
+ occluded,
+ });
+ }
}
}
}
@@ -253,76 +260,81 @@ class AnnotationParser {
}
_parseInterpolationData(xml) {
- let data = {
+ const data = {
box_paths: [],
polygon_paths: [],
polyline_paths: [],
- points_paths: []
+ points_paths: [],
};
- let tracks = xml.getElementsByTagName('track');
- for (let track of tracks) {
- let labelId = this._labelsInfo.labelIdOf(track.getAttribute('label'));
- let groupId = track.getAttribute('group_id') || '0';
+ const tracks = xml.getElementsByTagName('track');
+ for (const track of tracks) {
+ const labelId = this._labelsInfo.labelIdOf(track.getAttribute('label'));
+ const group = track.getAttribute('group_id') || '0';
if (labelId === null) {
- throw Error('An unknown label found in the annotation file: ' + name);
+ throw Error(`An unknown label found in the annotation file: "${track.getAttribute('label')}"`);
}
- let parsed = {
- boxes: Array.from(track.getElementsByTagName('box')),
- polygons: Array.from(track.getElementsByTagName('polygon')),
- polylines: Array.from(track.getElementsByTagName('polyline')),
+ const parsed = {
+ box: Array.from(track.getElementsByTagName('box')),
+ polygon: Array.from(track.getElementsByTagName('polygon')),
+ polyline: Array.from(track.getElementsByTagName('polyline')),
points: Array.from(track.getElementsByTagName('points')),
};
- for (let shape_type in parsed) {
- let shapes = parsed[shape_type];
- shapes.sort((a,b) => +a.getAttribute('frame') - + b.getAttribute('frame'));
+ for (const shapeType in parsed) {
+ if (Object.prototype.hasOwnProperty.call(parsed, shapeType)) {
+ const shapes = parsed[shapeType];
+ shapes.sort((a, b) => +a.getAttribute('frame') - +b.getAttribute('frame'));
- while (shapes.length && +shapes[0].getAttribute('outside')) {
- shapes.shift();
- }
+ while (shapes.length && +shapes[0].getAttribute('outside')) {
+ shapes.shift();
+ }
- if (shapes.length === 2) {
- if (shapes[1].getAttribute('frame') - shapes[0].getAttribute('frame') === 1 &&
- !+shapes[0].getAttribute('outside') && +shapes[1].getAttribute('outside')) {
- parsed[shape_type] = []; // pseudo interpolation track (actually is annotation)
+ if (shapes.length === 2) {
+ if (shapes[1].getAttribute('frame') - shapes[0].getAttribute('frame') === 1
+ && !+shapes[0].getAttribute('outside') && +shapes[1].getAttribute('outside')) {
+ // pseudo-interpolation track (actually an annotation)
+ parsed[shapeType] = [];
+ }
}
}
}
- let type = null, target = null;
- if (parsed.boxes.length) {
- type = 'boxes';
+ let type = null;
+ let target = null;
+ if (parsed.box.length) {
+ type = 'box';
target = 'box_paths';
- }
- else if (parsed.polygons.length) {
- type = 'polygons';
+ } else if (parsed.polygon.length) {
+ type = 'polygon';
target = 'polygon_paths';
- }
- else if (parsed.polylines.length) {
- type = 'polylines';
+ } else if (parsed.polyline.length) {
+ type = 'polyline';
target = 'polyline_paths';
- }
- else if (parsed.points.length) {
+ } else if (parsed.points.length) {
type = 'points';
target = 'points_paths';
+ } else {
+ continue;
}
- else continue;
- let path = {
+ const path = {
label_id: labelId,
- group_id: +groupId,
+ group: +group,
frame: +parsed[type][0].getAttribute('frame'),
attributes: [],
shapes: [],
- id: this._idGen.next(),
};
- for (let shape of parsed[type]) {
- let keyFrame = +shape.getAttribute('keyframe');
- let outside = +shape.getAttribute('outside');
- let frame = +shape.getAttribute('frame');
+ if (path.frame < this._startFrame || path.frame > this._stopFrame) {
+ continue;
+ }
+
+ for (const shape of parsed[type]) {
+ const keyFrame = +shape.getAttribute('keyframe');
+ const outside = +shape.getAttribute('outside');
+ const frame = +shape.getAttribute('frame');
/*
All keyframes are significant.
@@ -330,53 +342,53 @@ class AnnotationParser {
                        Ignore all frames less than start.
                        Ignore all frames greater than stop.
*/
- let significant = keyFrame || frame === this._startFrame;
+ const significant = (keyFrame || frame === this._startFrame)
+ && frame >= this._startFrame && frame <= this._stopFrame;
if (significant) {
- let attributeList = this._getAttributeList(shape, labelId);
- let shapeAttributes = [];
- let pathAttributes = [];
+ const attributeList = this._getAttributeList(shape, labelId);
+ const shapeAttributes = [];
+ const pathAttributes = [];
- for (let attr of attributeList) {
- let attrInfo = this._labelsInfo.attrInfo(attr.id);
+ for (const attr of attributeList) {
+ const attrInfo = this._labelsInfo.attrInfo(attr.spec_id);
if (attrInfo.mutable) {
shapeAttributes.push({
- id: attr.id,
+ spec_id: attr.spec_id,
value: attr.value,
});
- }
- else {
+ } else {
pathAttributes.push({
- id: attr.id,
+ spec_id: attr.spec_id,
value: attr.value,
});
}
}
path.attributes = pathAttributes;
- if (type === 'boxes') {
- let [xtl, ytl, xbr, ybr, occluded, z_order] = this._getBoxPosition(shape, Math.clamp(frame, this._startFrame, this._stopFrame));
+ if (type === 'box') {
+ const [points, occluded, zOrder] = this._getBoxPosition(shape,
+ Math.clamp(frame, this._startFrame, this._stopFrame));
path.shapes.push({
- frame: frame,
- occluded: occluded,
- outside: outside,
- xtl: xtl,
- ytl: ytl,
- xbr: xbr,
- ybr: ybr,
- z_order: z_order,
attributes: shapeAttributes,
+ type: 'rectangle',
+ frame,
+ occluded,
+ outside,
+ points,
+ zOrder,
});
- }
- else {
- let [points, occluded, z_order] = this._getPolyPosition(shape, Math.clamp(frame, this._startFrame, this._stopFrame));
+ } else {
+ const [points, occluded, zOrder] = this._getPolyPosition(shape,
+ Math.clamp(frame, this._startFrame, this._stopFrame));
path.shapes.push({
- frame: frame,
- occluded: occluded,
- outside: outside,
- points: points,
- z_order: z_order,
attributes: shapeAttributes,
+ type,
+ frame,
+ occluded,
+ outside,
+ points,
+ zOrder,
});
}
}
@@ -391,14 +403,33 @@ class AnnotationParser {
}
parse(text) {
- let xml = this._parser.parseFromString(text, 'text/xml');
- let parseerror = this._xmlParseError(xml);
+ const xml = this._parser.parseFromString(text, 'text/xml');
+ const parseerror = this._xmlParseError(xml);
if (parseerror.length) {
- throw Error('Annotation page parsing error. ' + parseerror[0].innerText);
+ throw Error(`Annotation page parsing error. ${parseerror[0].innerText}`);
}
- let interpolationData = this._parseInterpolationData(xml);
- let annotationData = this._parseAnnotationData(xml);
- return Object.assign({}, annotationData, interpolationData);
+ const interpolationData = this._parseInterpolationData(xml);
+ const annotationData = this._parseAnnotationData(xml);
+
+ const data = {
+ shapes: [],
+ tracks: [],
+ };
+
+
+ for (const type in interpolationData) {
+ if (Object.prototype.hasOwnProperty.call(interpolationData, type)) {
+ Array.prototype.push.apply(data.tracks, interpolationData[type]);
+ }
+ }
+
+ for (const type in annotationData) {
+ if (Object.prototype.hasOwnProperty.call(annotationData, type)) {
+ Array.prototype.push.apply(data.shapes, annotationData[type]);
+ }
+ }
+
+ return data;
}
}
diff --git a/cvat/apps/engine/static/engine/js/annotationSaver.js b/cvat/apps/engine/static/engine/js/annotationSaver.js
new file mode 100644
index 000000000000..5a034edaba97
--- /dev/null
+++ b/cvat/apps/engine/static/engine/js/annotationSaver.js
@@ -0,0 +1,402 @@
+/* exported buildAnnotationSaver */
+
+/* global
+ showOverlay:false
+ showMessage:false
+ Listener:false
+ Logger:false
+ Mousetrap:false
+*/
+
+
+class AnnotationSaverModel extends Listener {
+ constructor(initialData, shapeCollection) {
+ super('onAnnotationSaverUpdate', () => this._state);
+
+ this._state = {
+ status: null,
+ message: null,
+ };
+
+ this._version = initialData.version;
+ this._shapeCollection = shapeCollection;
+ this._initialObjects = [];
+
+ this._hash = this._getHash();
+
+ // We need to use the data from the export instead of initialData.
+ // Otherwise the key order differs and the JSON comparison would be incorrect.
+ const data = this._shapeCollection.export()[0];
+ for (const shape of data.shapes) {
+ this._initialObjects[shape.id] = shape;
+ }
+
+ for (const track of data.tracks) {
+ this._initialObjects[track.id] = track;
+ }
+ }
+
+ async _request(data, action) {
+ return new Promise((resolve, reject) => {
+ $.ajax({
+ url: `/api/v1/jobs/${window.cvat.job.id}/annotations?action=${action}`,
+ type: 'PATCH',
+ data: JSON.stringify(data),
+ contentType: 'application/json',
+ }).done((savedData) => {
+ resolve(savedData);
+ }).fail((errorData) => {
+ const message = `Could not make ${action} annotations. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ reject(new Error(message));
+ });
+ });
+ }
+
+ async _put(data) {
+ return new Promise((resolve, reject) => {
+ $.ajax({
+ url: `/api/v1/jobs/${window.cvat.job.id}/annotations`,
+ type: 'PUT',
+ data: JSON.stringify(data),
+ contentType: 'application/json',
+ }).done((savedData) => {
+ resolve(savedData);
+ }).fail((errorData) => {
+ const message = `Could not put annotations. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ reject(new Error(message));
+ });
+ });
+ }
+
+ async _create(created) {
+ return this._request(created, 'create');
+ }
+
+ async _update(updated) {
+ return this._request(updated, 'update');
+ }
+
+ async _delete(deleted) {
+ return this._request(deleted, 'delete');
+ }
+
+ async _logs() {
+ Logger.addEvent(Logger.EventType.saveJob);
+ const totalStat = this._shapeCollection.collectStatistic()[1];
+ Logger.addEvent(Logger.EventType.sendTaskInfo, {
+ 'track count': totalStat.boxes.annotation + totalStat.boxes.interpolation
+ + totalStat.polygons.annotation + totalStat.polygons.interpolation
+ + totalStat.polylines.annotation + totalStat.polylines.interpolation
+ + totalStat.points.annotation + totalStat.points.interpolation,
+ 'frame count': window.cvat.player.frames.stop - window.cvat.player.frames.start + 1,
+ 'object count': totalStat.total,
+ 'box count': totalStat.boxes.annotation + totalStat.boxes.interpolation,
+ 'polygon count': totalStat.polygons.annotation + totalStat.polygons.interpolation,
+ 'polyline count': totalStat.polylines.annotation + totalStat.polylines.interpolation,
+ 'points count': totalStat.points.annotation + totalStat.points.interpolation,
+ });
+
+ const annotationLogs = Logger.getLogs();
+
+ return new Promise((resolve, reject) => {
+ $.ajax({
+ url: '/api/v1/server/logs',
+ type: 'POST',
+ data: JSON.stringify(annotationLogs.export()),
+ contentType: 'application/json',
+ }).done(() => {
+ resolve();
+ }).fail((errorData) => {
+ annotationLogs.save();
+ const message = `Could not send logs. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ reject(new Error(message));
+ });
+ });
+ }
+
+ _split(exported) {
+ const exportedIDs = Array.from(exported.shapes, shape => +shape.id)
+ .concat(Array.from(exported.tracks, track => +track.id));
+
+ const created = {
+ version: this._version,
+ shapes: [],
+ tracks: [],
+ tags: [],
+ };
+
+ const updated = {
+ version: this._version + 1,
+ shapes: [],
+ tracks: [],
+ tags: [],
+ };
+
+ const deleted = {
+ version: this._version + 2,
+ shapes: [],
+ tracks: [],
+ tags: [],
+ };
+
+ // Compare initial state objects and export state objects
+ // in order to get updated and created objects
+ for (const obj of exported.shapes.concat(exported.tracks)) {
+ if (obj.id in this._initialObjects) {
+ const exportedHash = JSON.stringify(obj);
+ const initialSash = JSON.stringify(this._initialObjects[obj.id]);
+ if (exportedHash !== initialSash) {
+ const target = 'shapes' in obj ? updated.tracks : updated.shapes;
+ target.push(obj);
+ }
+ } else if (typeof obj.id === 'undefined') {
+ const target = 'shapes' in obj ? created.tracks : created.shapes;
+ target.push(obj);
+ } else {
+ throw Error(`Bad object ID found: ${obj.id}. `
+ + 'It is not contained in the initial state but has a server ID');
+ }
+ }
+
+ // Compare initial state indexes and export state indexes
+ // in order to get removed objects
+ for (const shapeID in this._initialObjects) {
+ if (!exportedIDs.includes(+shapeID)) {
+ const initialShape = this._initialObjects[shapeID];
+ const target = 'shapes' in initialShape ? deleted.tracks : deleted.shapes;
+ target.push(initialShape);
+ }
+ }
+
+ return [created, updated, deleted];
+ }
+
+ _getHash() {
+ const exported = this._shapeCollection.export()[0];
+ return JSON.stringify(exported);
+ }
+
+ _updateCreatedObjects(objectsToSave, savedObjects, mapping) {
+ // Sets up the server-assigned IDs of created objects after they have been saved
+ const allSavedObjects = savedObjects.shapes.concat(savedObjects.tracks);
+ const allObjectsToSave = objectsToSave.shapes.concat(objectsToSave.tracks);
+ if (allSavedObjects.length !== allObjectsToSave.length) {
+ throw Error('Number of saved objects does not match the number of objects to save');
+ }
+
+ for (let idx = 0; idx < allSavedObjects.length; idx += 1) {
+ const objectModel = mapping.filter(el => el[0] === allObjectsToSave[idx])[0][1];
+ const { id } = allSavedObjects[idx];
+ objectModel.serverID = id;
+ allObjectsToSave[idx].id = id;
+ }
+
+ this._shapeCollection.update();
+ }
+
+ notify(status, message = null) {
+ this._state.status = status;
+ this._state.message = message;
+ Listener.prototype.notify.call(this);
+ }
+
+ hasUnsavedChanges() {
+ return this._getHash() !== this._hash;
+ }
+
+ async save() {
+ this.notify('saveStart');
+ try {
+ const [exported, mapping] = this._shapeCollection.export();
+ const { flush } = this._shapeCollection;
+ if (flush) {
+ const data = Object.assign({}, exported, {
+ version: this._version,
+ tags: [],
+ });
+
+ this._version += 1;
+
+ this.notify('saveCreated');
+ const savedObjects = await this._put(data);
+ this._updateCreatedObjects(exported, savedObjects, mapping);
+ this._shapeCollection.flush = false;
+ this._version = savedObjects.version;
+ for (const object of savedObjects.shapes.concat(savedObjects.tracks)) {
+ this._initialObjects[object.id] = object;
+ }
+
+ this._version = savedObjects.version;
+ } else {
+ const [created, updated, deleted] = this._split(exported);
+ this.notify('saveCreated');
+ const savedCreated = await this._create(created);
+ this._updateCreatedObjects(created, savedCreated, mapping);
+ this._version = savedCreated.version;
+ for (const object of created.shapes.concat(created.tracks)) {
+ this._initialObjects[object.id] = object;
+ }
+
+ this.notify('saveUpdated');
+ const savedUpdated = await this._update(updated);
+ this._version = savedUpdated.version;
+ for (const object of updated.shapes.concat(updated.tracks)) {
+ if (object.id in this._initialObjects) {
+ this._initialObjects[object.id] = object;
+ }
+ }
+
+ this.notify('saveDeleted');
+ const savedDeleted = await this._delete(deleted);
+ this._version = savedDeleted.version;
+ for (const object of savedDeleted.shapes.concat(savedDeleted.tracks)) {
+ if (object.id in this._initialObjects) {
+ delete this._initialObjects[object.id];
+ }
+ }
+
+ this._version = savedDeleted.version;
+ }
+
+ await this._logs();
+ } catch (error) {
+ this.notify('saveUnlocked');
+ this.notify('saveError', error.message);
+ this._state = {
+ status: null,
+ message: null,
+ };
+ throw Error(error);
+ }
+
+ this._hash = this._getHash();
+ this.notify('saveDone');
+
+ setTimeout(() => {
+ this.notify('saveUnlocked');
+ this._state = {
+ status: null,
+ message: null,
+ };
+ }, 1000);
+ }
+
+ get state() {
+ return JSON.parse(JSON.stringify(this._state));
+ }
+}
+
+class AnnotationSaverController {
+ constructor(model) {
+ this._model = model;
+ this._autoSaveInterval = null;
+
+ const { shortkeys } = window.cvat.config;
+ Mousetrap.bind(shortkeys.save_work.value, () => {
+ this.save();
+ return false;
+ }, 'keydown');
+ }
+
+ autoSave(enabled, time) {
+ if (this._autoSaveInterval) {
+ clearInterval(this._autoSaveInterval);
+ this._autoSaveInterval = null;
+ }
+
+ if (enabled) {
+ this._autoSaveInterval = setInterval(() => {
+ this.save();
+ }, time * 1000 * 60);
+ }
+ }
+
+ hasUnsavedChanges() {
+ return this._model.hasUnsavedChanges();
+ }
+
+ save() {
+ if (this._model.state.status === null) {
+ this._model.save().catch((error) => {
+ setTimeout(() => {
+ throw error;
+ });
+ });
+ }
+ }
+}
+
+
+class AnnotationSaverView {
+ constructor(model, controller) {
+ model.subscribe(this);
+
+ this._controller = controller;
+ this._overlay = null;
+
+ const { shortkeys } = window.cvat.config;
+ const saveHelp = `${shortkeys.save_work.view_value} - ${shortkeys.save_work.description}`;
+
+ this._saveButton = $('#saveButton').on('click', () => {
+ this._controller.save();
+ }).attr('title', saveHelp);
+
+ this._autoSaveBox = $('#autoSaveBox').on('change', (e) => {
+ const enabled = e.target.checked;
+ const time = +this._autoSaveTime.prop('value');
+ this._controller.autoSave(enabled, time);
+ });
+
+ this._autoSaveTime = $('#autoSaveTime').on('change', (e) => {
+ e.target.value = Math.clamp(+e.target.value, +e.target.min, +e.target.max);
+ this._autoSaveBox.trigger('change');
+ });
+
+ window.onbeforeunload = (e) => {
+ if (this._controller.hasUnsavedChanges()) { // eslint-disable-line react/no-this-in-sfc
+ const message = 'You have unsaved changes. Leave this page?';
+ e.returnValue = message;
+ return message;
+ }
+ return null;
+ };
+ }
+
+ onAnnotationSaverUpdate(state) {
+ if (state.status === 'saveStart') {
+ this._overlay = showOverlay('Annotations are being saved..');
+ this._saveButton.prop('disabled', true).text('Saving..');
+ } else if (state.status === 'saveDone') {
+ this._saveButton.text('Successful save');
+ this._overlay.remove();
+ } else if (state.status === 'saveError') {
+ this._saveButton.prop('disabled', false).text('Save Work');
+ const message = `Could not save the job. Errors occurred: ${state.message}. `
+ + 'Please report the problem to support team immediately.';
+ showMessage(message);
+ this._overlay.remove();
+ } else if (state.status === 'saveCreated') {
+ this._overlay.setMessage(`${this._overlay.getMessage()}
+- Created objects are being saved..`);
+ } else if (state.status === 'saveUpdated') {
+ this._overlay.setMessage(`${this._overlay.getMessage()}
+- Updated objects are being saved..`);
+ } else if (state.status === 'saveDeleted') {
+ this._overlay.setMessage(`${this._overlay.getMessage()}
- Deleted objects are being saved..`); + } else if (state.status === 'saveUnlocked') { + this._saveButton.prop('disabled', false).text('Save Work'); + } else { + const message = `Unknown state has been reached during annotation saving: ${state.status} ` + + 'Please report the problem to support team immediately.'; + showMessage(message); + } + } +} + + +function buildAnnotationSaver(initialData, shapeCollection) { + const model = new AnnotationSaverModel(initialData, shapeCollection); + const controller = new AnnotationSaverController(model); + new AnnotationSaverView(model, controller); +} diff --git a/cvat/apps/engine/static/engine/js/annotationUI.js b/cvat/apps/engine/static/engine/js/annotationUI.js index 1c1471f225c0..1044fb25cd5b 100644 --- a/cvat/apps/engine/static/engine/js/annotationUI.js +++ b/cvat/apps/engine/static/engine/js/annotationUI.js @@ -18,7 +18,6 @@ HistoryController:false HistoryModel:false HistoryView:false - IncrementIdGenerator:false, Logger:false Mousetrap:false PlayerController:false @@ -28,8 +27,6 @@ PolyshapeEditorModel:false PolyshapeEditorView:false PolyShapeView:false - saveJobRequest:false - serverRequest:false ShapeBufferController:false ShapeBufferModel:false ShapeBufferView:false @@ -47,296 +44,128 @@ ShapeMergerView:false showMessage:false showOverlay:false + buildAnnotationSaver:false + LabelsInfo:false */ -"use strict"; - -function callAnnotationUI(jid) { - initLogger(jid); - let loadJobEvent = Logger.addContinuedEvent(Logger.EventType.loadJob); - serverRequest("/get/job/" + jid, function(job) { - serverRequest("get/annotation/job/" + jid, function(data) { - $('#loadingOverlay').remove(); - setTimeout(() => { - buildAnnotationUI(job, data, loadJobEvent); - }, 0); - }); - }); -} - -function initLogger(jobID) { - if (!Logger.initializeLogger('CVAT', jobID)) - { - let message = 'Could not initialize Logger. 
Please immediately report the problem to support team'; +async function initLogger(jobID) { + if (!Logger.initializeLogger(jobID)) { + const message = 'Logger has been already initialized'; console.error(message); showMessage(message); return; } Logger.setTimeThreshold(Logger.EventType.zoomImage); - - serverRequest('/get/username', function(response) { - Logger.setUsername(response.username); - }); } -function buildAnnotationUI(job, shapeData, loadJobEvent) { - // Setup some API - window.cvat = { - labelsInfo: new LabelsInfo(job), - translate: new CoordinateTranslator(), - player: { - geometry: { - scale: 1, - }, - frames: { - current: job.start, - start: job.start, - stop: job.stop, - } - }, - mode: null, - job: { - z_order: job.z_order, - id: job.jobid, - images: job.image_meta_data, - }, - search: { - value: window.location.search, - - set: function(name, value) { - let searchParams = new URLSearchParams(this.value); - - if (typeof value === 'undefined' || value === null) { - if (searchParams.has(name)) { - searchParams.delete(name); - } - } - else searchParams.set(name, value); - this.value = `${searchParams.toString()}`; - }, - - get: function(name) { - try { - let decodedURI = decodeURIComponent(this.value); - let urlSearchParams = new URLSearchParams(decodedURI); - if (urlSearchParams.has(name)) { - return urlSearchParams.get(name); - } - else return null; - } - catch (error) { - showMessage('Bad URL has been found'); - this.value = window.location.href; - return null; - } - }, - - toString: function() { - return `${window.location.origin}/?${this.value}`; - } - } - }; - - // Remove external search parameters from url - window.history.replaceState(null, null, `${window.location.origin}/?id=${job.jobid}`); - window.cvat.config = new Config(); +function blurAllElements() { + document.activeElement.blur(); +} - // Setup components - let idGenerator = new IncrementIdGenerator(job.max_shape_id + 1); - let annotationParser = new AnnotationParser(job, 
window.cvat.labelsInfo, idGenerator); - let shapeCollectionModel = new ShapeCollectionModel(idGenerator).import(shapeData, true); - let shapeCollectionController = new ShapeCollectionController(shapeCollectionModel); - let shapeCollectionView = new ShapeCollectionView(shapeCollectionModel, shapeCollectionController); +function uploadAnnotation(shapeCollectionModel, historyModel, + annotationParser, uploadAnnotationButton) { + $('#annotationFileSelector').one('change', (changedFileEvent) => { + const file = changedFileEvent.target.files['0']; + changedFileEvent.target.value = ''; + if (!file || file.type !== 'text/xml') return; + uploadAnnotationButton.text('Preparing..'); + uploadAnnotationButton.prop('disabled', true); + const overlay = showOverlay('File is being uploaded..'); - // In case of old tasks that dont provide max saved shape id properly - if (job.max_shape_id === -1) { - idGenerator.reset(shapeCollectionModel.maxId + 1); - } + const fileReader = new FileReader(); + fileReader.onload = (loadedFileEvent) => { + let data = null; - window.cvat.data = { - get: () => shapeCollectionModel.exportAll(), - set: (data) => { - for (let type in data) { - for (let shape of data[type]) { - shape.id = idGenerator.next(); + const asyncParse = () => { + try { + data = annotationParser.parse(loadedFileEvent.target.result); + } catch (err) { + overlay.remove(); + showMessage(err.message); + return; + } finally { + uploadAnnotationButton.text('Upload Annotation'); + uploadAnnotationButton.prop('disabled', false); } - } - shapeCollectionModel.import(data, false); - shapeCollectionModel.update(); - }, - clear: () => shapeCollectionModel.empty(), - }; - - let shapeBufferModel = new ShapeBufferModel(shapeCollectionModel); - let shapeBufferController = new ShapeBufferController(shapeBufferModel); - let shapeBufferView = new ShapeBufferView(shapeBufferModel, shapeBufferController); - - $('#shapeModeSelector').prop('value', job.mode); - let shapeCreatorModel = new 
ShapeCreatorModel(shapeCollectionModel, job); - let shapeCreatorController = new ShapeCreatorController(shapeCreatorModel); - let shapeCreatorView = new ShapeCreatorView(shapeCreatorModel, shapeCreatorController); - - let polyshapeEditorModel = new PolyshapeEditorModel(); - let polyshapeEditorController = new PolyshapeEditorController(polyshapeEditorModel); - let polyshapeEditorView = new PolyshapeEditorView(polyshapeEditorModel, polyshapeEditorController); - - // Add static member for class. It will be used by all polyshapes. - PolyShapeView.editor = polyshapeEditorModel; - - let shapeMergerModel = new ShapeMergerModel(shapeCollectionModel); - let shapeMergerController = new ShapeMergerController(shapeMergerModel); - new ShapeMergerView(shapeMergerModel, shapeMergerController); - - let shapeGrouperModel = new ShapeGrouperModel(shapeCollectionModel); - let shapeGrouperController = new ShapeGrouperController(shapeGrouperModel); - let shapeGrouperView = new ShapeGrouperView(shapeGrouperModel, shapeGrouperController); - - let aamModel = new AAMModel(shapeCollectionModel, (xtl, xbr, ytl, ybr) => { - playerModel.focus(xtl, xbr, ytl, ybr); - }, () => { - playerModel.fit(); - }); - let aamController = new AAMController(aamModel); - new AAMView(aamModel, aamController); - - shapeCreatorModel.subscribe(shapeCollectionModel); - shapeGrouperModel.subscribe(shapeCollectionView); - shapeCollectionModel.subscribe(shapeGrouperModel); - - $('#playerProgress').css('width', $('#player')["0"].clientWidth - 420); - - let playerGeometry = { - width: $('#playerFrame').width(), - height: $('#playerFrame').height(), - }; - - let playerModel = new PlayerModel(job, playerGeometry); - let playerController = new PlayerController(playerModel, - () => shapeCollectionModel.activeShape, - (direction) => shapeCollectionModel.find(direction), - Object.assign({}, playerGeometry, { - left: $('#playerFrame').offset().left, - top: $('#playerFrame').offset().top, - }), job); - new 
PlayerView(playerModel, playerController, job); - - let historyModel = new HistoryModel(playerModel, idGenerator); - let historyController = new HistoryController(historyModel); - new HistoryView(historyController, historyModel); - - playerModel.subscribe(shapeCollectionModel); - playerModel.subscribe(shapeCollectionView); - playerModel.subscribe(shapeCreatorView); - playerModel.subscribe(shapeBufferView); - playerModel.subscribe(shapeGrouperView); - playerModel.subscribe(polyshapeEditorView); - playerModel.shift(window.cvat.search.get('frame') || 0, true); - - let shortkeys = window.cvat.config.shortkeys; - - setupHelpWindow(shortkeys); - setupSettingsWindow(); - setupMenu(job, shapeCollectionModel, annotationParser, aamModel, playerModel, historyModel); - setupFrameFilters(); - setupShortkeys(shortkeys, { - aam: aamModel, - shapeCreator: shapeCreatorModel, - shapeMerger: shapeMergerModel, - shapeGrouper: shapeGrouperModel, - shapeBuffer: shapeBufferModel, - shapeEditor: polyshapeEditorModel - }); - - $(window).on('click', function(event) { - Logger.updateUserActivityTimer(); - if (event.target.classList.contains('modal') && !event.target.classList.contains('force-modal')) { - event.target.classList.add('hidden'); - } - }); - - let totalStat = shapeCollectionModel.collectStatistic()[1]; - loadJobEvent.addValues({ - 'track count': totalStat.boxes.annotation + totalStat.boxes.interpolation + - totalStat.polygons.annotation + totalStat.polygons.interpolation + - totalStat.polylines.annotation + totalStat.polylines.interpolation + - totalStat.points.annotation + totalStat.points.interpolation, - 'frame count': job.stop - job.start + 1, - 'object count': totalStat.total, - 'box count': totalStat.boxes.annotation + totalStat.boxes.interpolation, - 'polygon count': totalStat.polygons.annotation + totalStat.polygons.interpolation, - 'polyline count': totalStat.polylines.annotation + totalStat.polylines.interpolation, - 'points count': totalStat.points.annotation + 
totalStat.points.interpolation, - }); - loadJobEvent.close(); - - window.onbeforeunload = function(e) { - if (shapeCollectionModel.hasUnsavedChanges()) { - let message = "You have unsaved changes. Leave this page?"; - e.returnValue = message; - return message; - } - return; - }; - - $('#player').on('click', (e) => { - if (e.target.tagName.toLowerCase() != 'input') { - blurAllElements(); - } - }); -} + const asyncImport = () => { + try { + historyModel.empty(); + shapeCollectionModel.empty(); + shapeCollectionModel.import(data); + shapeCollectionModel.update(); + } finally { + overlay.remove(); + } + }; + overlay.setMessage('Data are being imported..'); + setTimeout(asyncImport); + }; -function copyToClipboard(text) { - let tempInput = $(""); - $("body").append(tempInput); - tempInput.prop('value', text).select(); - document.execCommand("copy"); - tempInput.remove(); + overlay.setMessage('File is being parsed..'); + setTimeout(asyncParse); + }; + fileReader.readAsText(file); + }).click(); } function setupFrameFilters() { - let brightnessRange = $('#playerBrightnessRange'); - let contrastRange = $('#playerContrastRange'); - let saturationRange = $('#playerSaturationRange'); - let frameBackground = $('#frameBackground'); - let reset = $('#resetPlayerFilterButton'); + const brightnessRange = $('#playerBrightnessRange'); + const contrastRange = $('#playerContrastRange'); + const saturationRange = $('#playerSaturationRange'); + const frameBackground = $('#frameBackground'); + const reset = $('#resetPlayerFilterButton'); let brightness = 100; let contrast = 100; let saturation = 100; - let shortkeys = window.cvat.config.shortkeys; + const { shortkeys } = window.cvat.config; + + function updateFilterParameters() { + frameBackground.css('filter', `contrast(${contrast}%) brightness(${brightness}%) saturate(${saturation}%)`); + } + brightnessRange.attr('title', ` - ${shortkeys['change_player_brightness'].view_value} - ${shortkeys['change_player_brightness'].description}`); + 
${shortkeys.change_player_brightness.view_value} - ${shortkeys.change_player_brightness.description}`); contrastRange.attr('title', ` - ${shortkeys['change_player_contrast'].view_value} - ${shortkeys['change_player_contrast'].description}`); + ${shortkeys.change_player_contrast.view_value} - ${shortkeys.change_player_contrast.description}`); saturationRange.attr('title', ` - ${shortkeys['change_player_saturation'].view_value} - ${shortkeys['change_player_saturation'].description}`); + ${shortkeys.change_player_saturation.view_value} - ${shortkeys.change_player_saturation.description}`); - let changeBrightnessHandler = Logger.shortkeyLogDecorator(function(e) { - if (e.shiftKey) brightnessRange.prop('value', brightness + 10).trigger('input'); - else brightnessRange.prop('value', brightness - 10).trigger('input'); + const changeBrightnessHandler = Logger.shortkeyLogDecorator((e) => { + if (e.shiftKey) { + brightnessRange.prop('value', brightness + 10).trigger('input'); + } else { + brightnessRange.prop('value', brightness - 10).trigger('input'); + } }); - let changeContrastHandler = Logger.shortkeyLogDecorator(function(e) { - if (e.shiftKey) contrastRange.prop('value', contrast + 10).trigger('input'); - else contrastRange.prop('value', contrast - 10).trigger('input'); + const changeContrastHandler = Logger.shortkeyLogDecorator((e) => { + if (e.shiftKey) { + contrastRange.prop('value', contrast + 10).trigger('input'); + } else { + contrastRange.prop('value', contrast - 10).trigger('input'); + } }); - let changeSaturationHandler = Logger.shortkeyLogDecorator(function(e) { - if (e.shiftKey) saturationRange.prop('value', saturation + 10).trigger('input'); - else saturationRange.prop('value', saturation - 10).trigger('input'); + const changeSaturationHandler = Logger.shortkeyLogDecorator((e) => { + if (e.shiftKey) { + saturationRange.prop('value', saturation + 10).trigger('input'); + } else { + saturationRange.prop('value', saturation - 10).trigger('input'); + } }); - 
Mousetrap.bind(shortkeys["change_player_brightness"].value, changeBrightnessHandler, 'keydown'); - Mousetrap.bind(shortkeys["change_player_contrast"].value, changeContrastHandler, 'keydown'); - Mousetrap.bind(shortkeys["change_player_saturation"].value, changeSaturationHandler, 'keydown'); + Mousetrap.bind(shortkeys.change_player_brightness.value, changeBrightnessHandler, 'keydown'); + Mousetrap.bind(shortkeys.change_player_contrast.value, changeContrastHandler, 'keydown'); + Mousetrap.bind(shortkeys.change_player_saturation.value, changeSaturationHandler, 'keydown'); - reset.on('click', function() { + reset.on('click', () => { brightness = 100; contrast = 100; saturation = 100; @@ -346,74 +175,61 @@ function setupFrameFilters() { updateFilterParameters(); }); - brightnessRange.on('input', function(e) { - let value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); - brightness = e.target.value = value; + brightnessRange.on('input', (e) => { + const value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); + e.target.value = value; + brightness = value; updateFilterParameters(); }); - contrastRange.on('input', function(e) { - let value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); - contrast = e.target.value = value; + contrastRange.on('input', (e) => { + const value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); + e.target.value = value; + contrast = value; updateFilterParameters(); }); - saturationRange.on('input', function(e) { - let value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); - saturation = e.target.value = value; + saturationRange.on('input', (e) => { + const value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); + e.target.value = value; + saturation = value; updateFilterParameters(); }); - - function updateFilterParameters() { - frameBackground.css('filter', `contrast(${contrast}%) brightness(${brightness}%) saturate(${saturation}%)`); - } } function setupShortkeys(shortkeys, 
models) { - let annotationMenu = $('#annotationMenu'); - let settingsWindow = $('#settingsWindow'); - let helpWindow = $('#helpWindow'); + const annotationMenu = $('#annotationMenu'); + const settingsWindow = $('#settingsWindow'); + const helpWindow = $('#helpWindow'); - Mousetrap.prototype.stopCallback = function() { - return false; - }; + Mousetrap.prototype.stopCallback = () => false; - let openHelpHandler = Logger.shortkeyLogDecorator(function() { - let helpInvisible = helpWindow.hasClass('hidden'); + const openHelpHandler = Logger.shortkeyLogDecorator(() => { + const helpInvisible = helpWindow.hasClass('hidden'); if (helpInvisible) { annotationMenu.addClass('hidden'); settingsWindow.addClass('hidden'); helpWindow.removeClass('hidden'); - } - else { + } else { helpWindow.addClass('hidden'); } return false; }); - let openSettingsHandler = Logger.shortkeyLogDecorator(function() { - let settingsInvisible = settingsWindow.hasClass('hidden'); + const openSettingsHandler = Logger.shortkeyLogDecorator(() => { + const settingsInvisible = settingsWindow.hasClass('hidden'); if (settingsInvisible) { annotationMenu.addClass('hidden'); helpWindow.addClass('hidden'); settingsWindow.removeClass('hidden'); - } - else { + } else { $('#settingsWindow').addClass('hidden'); } return false; }); - let saveHandler = Logger.shortkeyLogDecorator(function() { - let saveButtonLocked = $('#saveButton').prop('disabled'); - if (!saveButtonLocked) { - $('#saveButton').click(); - } - return false; - }); - - let cancelModeHandler = Logger.shortkeyLogDecorator(function() { + const cancelModeHandler = Logger.shortkeyLogDecorator(() => { switch (window.cvat.mode) { case 'aam': models.aam.switchAAMMode(); @@ -433,83 +249,62 @@ function setupShortkeys(shortkeys, models) { case 'poly_editing': models.shapeEditor.finish(); break; + default: + break; } return false; }); - Mousetrap.bind(shortkeys["open_help"].value, openHelpHandler, 'keydown'); - Mousetrap.bind(shortkeys["open_settings"].value, 
openSettingsHandler, 'keydown'); - Mousetrap.bind(shortkeys["save_work"].value, saveHandler, 'keydown'); - Mousetrap.bind(shortkeys["cancel_mode"].value, cancelModeHandler, 'keydown'); + Mousetrap.bind(shortkeys.open_help.value, openHelpHandler, 'keydown'); + Mousetrap.bind(shortkeys.open_settings.value, openSettingsHandler, 'keydown'); + Mousetrap.bind(shortkeys.cancel_mode.value, cancelModeHandler, 'keydown'); } function setupHelpWindow(shortkeys) { - let closeHelpButton = $('#closeHelpButton'); - let helpTable = $('#shortkeyHelpTable'); + const closeHelpButton = $('#closeHelpButton'); + const helpTable = $('#shortkeyHelpTable'); - closeHelpButton.on('click', function() { + closeHelpButton.on('click', () => { $('#helpWindow').addClass('hidden'); }); - for (let key in shortkeys) { - helpTable.append($(` ${shortkeys[key].view_value} ${shortkeys[key].description} `));
+ for (const key in shortkeys) {
+ if (Object.prototype.hasOwnProperty.call(shortkeys, key)) {
+ helpTable.append($(` ${shortkeys[key].view_value} ${shortkeys[key].description} `));
+ }
}
}
function setupSettingsWindow() {
- let closeSettingsButton = $('#closeSettignsButton');
- let autoSaveBox = $('#autoSaveBox');
- let autoSaveTime = $('#autoSaveTime');
+ const closeSettingsButton = $('#closeSettignsButton');
- closeSettingsButton.on('click', function() {
+ closeSettingsButton.on('click', () => {
$('#settingsWindow').addClass('hidden');
});
-
- let saveInterval = null;
- autoSaveBox.on('change', function(e) {
- if (saveInterval) {
- clearInterval(saveInterval);
- saveInterval = null;
- }
-
- if (e.target.checked) {
- let time = +autoSaveTime.prop('value');
- saveInterval = setInterval(() => {
- let saveButton = $('#saveButton');
- if (!saveButton.prop('disabled')) {
- saveButton.click();
- }
- }, time * 1000 * 60);
- }
-
- autoSaveTime.on('change', () => {
- let value = Math.clamp(+e.target.value, +e.target.min, +e.target.max);
- e.target.value = value;
- autoSaveBox.trigger('change');
- });
- });
}
-function setupMenu(job, shapeCollectionModel, annotationParser, aamModel, playerModel, historyModel) {
- let annotationMenu = $('#annotationMenu');
- let menuButton = $('#menuButton');
+function setupMenu(job, task, shapeCollectionModel,
+ annotationParser, aamModel, playerModel, historyModel) {
+ const annotationMenu = $('#annotationMenu');
+ const menuButton = $('#menuButton');
function hide() {
annotationMenu.addClass('hidden');
}
- (function setupVisibility() {
+ function setupVisibility() {
let timer = null;
menuButton.on('click', () => {
- let [byLabelsStat, totalStat] = shapeCollectionModel.collectStatistic();
- let table = $('#annotationStatisticTable');
+ const [byLabelsStat, totalStat] = shapeCollectionModel.collectStatistic();
+ const table = $('#annotationStatisticTable');
table.find('.temporaryStatisticRow').remove();
- for (let labelId in byLabelsStat) {
- $(`
+ for (const labelId in byLabelsStat) {
+ if (Object.prototype.hasOwnProperty.call(byLabelsStat, labelId)) {
+ $(`
${window.cvat.labelsInfo.labels()[labelId].normalize()}
${byLabelsStat[labelId].boxes.annotation}
${byLabelsStat[labelId].boxes.interpolation}
@@ -523,6 +318,7 @@ function setupMenu(job, shapeCollectionModel, annotationParser, aamModel, player
${byLabelsStat[labelId].interpolated}
${byLabelsStat[labelId].total}
`).addClass('temporaryStatisticRow').appendTo(table);
+ }
}
$(`
@@ -543,7 +339,7 @@ function setupMenu(job, shapeCollectionModel, annotationParser, aamModel, player
menuButton.on('click', () => {
annotationMenu.removeClass('hidden');
- annotationMenu.css('top', menuButton.offset().top - annotationMenu.height() - menuButton.height() + 'px');
+ annotationMenu.css('top', `${menuButton.offset().top - annotationMenu.height() - menuButton.height()}px`);
if (timer) {
clearTimeout(timer);
timer = null;
@@ -561,50 +357,65 @@ function setupMenu(job, shapeCollectionModel, annotationParser, aamModel, player
timer = setTimeout(hide, 500);
});
- annotationMenu.on('mouseover', function() {
+ annotationMenu.on('mouseover', () => {
if (timer) {
clearTimeout(timer);
timer = null;
}
});
- })();
-
- $('#statTaskName').text(job.slug);
- $('#statFrames').text(`[${job.start}-${job.stop}]`);
- $('#statOverlap').text(job.overlap);
- $('#statZOrder').text(job.z_order);
- $('#statFlipped').text(job.flipped);
- $('#statTaskStatus').prop("value", job.status).on('change', (e) => {
- $.ajax({
- type: 'POST',
- url: 'save/status/job/' + window.cvat.job.id,
- data: JSON.stringify({
- status: e.target.value
- }),
- contentType: "application/json; charset=utf-8",
- error: (data) => {
- showMessage(`Can not change job status. Code: ${data.status}. Message: ${data.responeText || data.statusText}`);
- }
- });
+ }
+
+ setupVisibility();
+
+ $('#statTaskName').text(task.name);
+ $('#statFrames').text(`[${window.cvat.player.frames.start}-${window.cvat.player.frames.stop}]`);
+ $('#statOverlap').text(task.overlap);
+ $('#statZOrder').text(task.z_order);
+ $('#statFlipped').text(task.flipped);
+ $('#statTaskStatus').prop('value', job.status).on('change', async (e) => {
+ try {
+ const jobCopy = JSON.parse(JSON.stringify(job));
+ jobCopy.status = e.target.value;
+
+ await $.ajax({
+ url: `/api/v1/jobs/${window.cvat.job.id}`,
+ type: 'PATCH',
+ data: JSON.stringify(jobCopy),
+ contentType: 'application/json',
+ });
+ } catch (errorData) {
+ const message = `Can not update a job status. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ showMessage(message);
+ }
});
- let shortkeys = window.cvat.config.shortkeys;
+ const { shortkeys } = window.cvat.config;
$('#helpButton').on('click', () => {
hide();
$('#helpWindow').removeClass('hidden');
});
+
$('#helpButton').attr('title', `
- ${shortkeys['open_help'].view_value} - ${shortkeys['open_help'].description}`);
+ ${shortkeys.open_help.view_value} - ${shortkeys.open_help.description}`);
$('#settingsButton').on('click', () => {
hide();
$('#settingsWindow').removeClass('hidden');
});
- $('#settingsButton').attr('title', `
- ${shortkeys['open_settings'].view_value} - ${shortkeys['open_settings'].description}`);
- $('#downloadAnnotationButton').on('click', (e) => {
- dumpAnnotationRequest(e.target, job.taskid, job.slug);
+ $('#settingsButton').attr('title', `
+ ${shortkeys.open_settings.view_value} - ${shortkeys.open_settings.description}`);
+
+ $('#downloadAnnotationButton').on('click', async (e) => {
+ e.target.disabled = true;
+ try {
+ await dumpAnnotationRequest(task.id, task.name);
+ } catch (error) {
+ showMessage(error.message);
+ } finally {
+ e.target.disabled = false;
+ }
});
$('#uploadAnnotationButton').on('click', () => {
@@ -612,8 +423,7 @@ function setupMenu(job, shapeCollectionModel, annotationParser, aamModel, player
userConfirm('Current annotation will be removed from the client. Continue?',
() => {
uploadAnnotation(shapeCollectionModel, historyModel, annotationParser, $('#uploadAnnotationButton'));
- }
- );
+ });
});
$('#removeAnnotationButton').on('click', () => {
@@ -623,17 +433,10 @@ function setupMenu(job, shapeCollectionModel, annotationParser, aamModel, player
() => {
historyModel.empty();
shapeCollectionModel.empty();
- }
- );
+ });
}
});
- $('#saveButton').on('click', () => {
- saveAnnotation(shapeCollectionModel, job);
- });
- $('#saveButton').attr('title', `
- ${shortkeys['save_work'].view_value} - ${shortkeys['save_work'].description}`);
-
// JS function cancelFullScreen don't work after pressing
// and it is famous problem.
$('#fullScreenButton').on('click', () => {
@@ -654,135 +457,268 @@ function setupMenu(job, shapeCollectionModel, annotationParser, aamModel, player
});
$('#switchAAMButton').attr('title', `
- ${shortkeys['switch_aam_mode'].view_value} - ${shortkeys['switch_aam_mode'].description}`);
+ ${shortkeys.switch_aam_mode.view_value} - ${shortkeys.switch_aam_mode.description}`);
}
-function drawBoxSize(boxScene, textScene, box) {
- let clientBox = window.cvat.translate.box.canvasToClient(boxScene.node, box);
- let text = `${box.width.toFixed(1)}x${box.height.toFixed(1)}`;
- let obj = this && this.textUI && this.rm ? this : {
- textUI: textScene.text('').font({
- weight: 'bolder'
- }).fill('white'),
+function buildAnnotationUI(jobData, taskData, imageMetaData, annotationData, loadJobEvent) {
+ // Setup some API
+ window.cvat = {
+ labelsInfo: new LabelsInfo(taskData.labels),
+ translate: new CoordinateTranslator(),
+ player: {
+ geometry: {
+ scale: 1,
+ },
+ frames: {
+ current: jobData.start_frame,
+ start: jobData.start_frame,
+ stop: jobData.stop_frame,
+ },
+ },
+ mode: null,
+ job: {
+ z_order: taskData.z_order,
+ id: jobData.id,
+ task_id: taskData.id,
+ images: imageMetaData,
+ },
+ search: {
+ value: window.location.search,
- rm: function() {
- if (this.textUI) {
- this.textUI.remove();
- }
- }
+ set(name, value) {
+ const searchParams = new URLSearchParams(this.value);
+
+ if (typeof value === 'undefined' || value === null) {
+ if (searchParams.has(name)) {
+ searchParams.delete(name);
+ }
+ } else {
+ searchParams.set(name, value);
+ }
+ this.value = `${searchParams.toString()}`;
+ },
+
+ get(name) {
+ try {
+ const decodedURI = decodeURIComponent(this.value);
+ const urlSearchParams = new URLSearchParams(decodedURI);
+ if (urlSearchParams.has(name)) {
+ return urlSearchParams.get(name);
+ }
+ return null;
+ } catch (error) {
+ showMessage('Bad URL has been received');
+ this.value = window.location.href;
+ return null;
+ }
+ },
+
+ toString() {
+ return `${window.location.origin}/?${this.value}`;
+ },
+ },
};
- let textPoint = window.cvat.translate.point.clientToCanvas(textScene.node, clientBox.x, clientBox.y);
+ // Remove external search parameters from url
+ window.history.replaceState(null, null, `${window.location.origin}/?id=${jobData.id}`);
- obj.textUI.clear().plain(text);
- obj.textUI.addClass("shapeText");
- obj.textUI.move(textPoint.x, textPoint.y);
+ window.cvat.config = new Config();
- return obj;
-}
+ // Setup components
+ const annotationParser = new AnnotationParser({
+ start: window.cvat.player.frames.start,
+ stop: window.cvat.player.frames.stop,
+ flipped: taskData.flipped,
+ image_meta_data: imageMetaData,
+ }, window.cvat.labelsInfo);
+ const shapeCollectionModel = new ShapeCollectionModel().import(annotationData);
+ const shapeCollectionController = new ShapeCollectionController(shapeCollectionModel);
+ const shapeCollectionView = new ShapeCollectionView(shapeCollectionModel,
+ shapeCollectionController);
-function uploadAnnotation(shapeCollectionModel, historyModel, annotationParser, uploadAnnotationButton) {
- $('#annotationFileSelector').one('change', (e) => {
- let file = e.target.files['0'];
- e.target.value = "";
- if (!file || file.type != 'text/xml') return;
- uploadAnnotationButton.text('Preparing..');
- uploadAnnotationButton.prop('disabled', true);
- let overlay = showOverlay("File is being uploaded..");
+ buildAnnotationSaver(annotationData, shapeCollectionModel);
- let fileReader = new FileReader();
- fileReader.onload = function(e) {
- let data = null;
+ window.cvat.data = {
+ get: () => shapeCollectionModel.export()[0],
+ set: (data) => {
+ shapeCollectionModel.import(data);
+ shapeCollectionModel.update();
+ },
+ clear: () => shapeCollectionModel.empty(),
+ };
- let asyncParse = function() {
- try {
- data = annotationParser.parse(e.target.result);
- }
- catch (err) {
- overlay.remove();
- showMessage(err.message);
- return;
- }
- finally {
- uploadAnnotationButton.text('Upload Annotation');
- uploadAnnotationButton.prop('disabled', false);
- }
+ const shapeBufferModel = new ShapeBufferModel(shapeCollectionModel);
+ const shapeBufferController = new ShapeBufferController(shapeBufferModel);
+ const shapeBufferView = new ShapeBufferView(shapeBufferModel, shapeBufferController);
- let asyncImport = function() {
- try {
- historyModel.empty();
- shapeCollectionModel.empty();
- shapeCollectionModel.import(data, false);
- shapeCollectionModel.update();
- }
- finally {
- overlay.remove();
- }
- };
+ $('#shapeModeSelector').prop('value', taskData.mode);
+ const shapeCreatorModel = new ShapeCreatorModel(shapeCollectionModel);
+ const shapeCreatorController = new ShapeCreatorController(shapeCreatorModel);
+ const shapeCreatorView = new ShapeCreatorView(shapeCreatorModel, shapeCreatorController);
- overlay.setMessage('Data are being imported..');
- setTimeout(asyncImport);
- };
+ const polyshapeEditorModel = new PolyshapeEditorModel();
+ const polyshapeEditorController = new PolyshapeEditorController(polyshapeEditorModel);
+ const polyshapeEditorView = new PolyshapeEditorView(polyshapeEditorModel,
+ polyshapeEditorController);
- overlay.setMessage('File is being parsed..');
- setTimeout(asyncParse);
- };
- fileReader.readAsText(file);
- }).click();
-}
+ // Add static member for class. It will be used by all polyshapes.
+ PolyShapeView.editor = polyshapeEditorModel;
+ const shapeMergerModel = new ShapeMergerModel(shapeCollectionModel);
+ const shapeMergerController = new ShapeMergerController(shapeMergerModel);
+ new ShapeMergerView(shapeMergerModel, shapeMergerController);
+
+ const shapeGrouperModel = new ShapeGrouperModel(shapeCollectionModel);
+ const shapeGrouperController = new ShapeGrouperController(shapeGrouperModel);
+ const shapeGrouperView = new ShapeGrouperView(shapeGrouperModel, shapeGrouperController);
-function saveAnnotation(shapeCollectionModel, job) {
- let saveButton = $('#saveButton');
+ const playerGeometry = {
+ width: $('#playerFrame').width(),
+ height: $('#playerFrame').height(),
+ };
- Logger.addEvent(Logger.EventType.saveJob);
- let totalStat = shapeCollectionModel.collectStatistic()[1];
- Logger.addEvent(Logger.EventType.sendTaskInfo, {
- 'track count': totalStat.boxes.annotation + totalStat.boxes.interpolation +
- totalStat.polygons.annotation + totalStat.polygons.interpolation +
- totalStat.polylines.annotation + totalStat.polylines.interpolation +
- totalStat.points.annotation + totalStat.points.interpolation,
- 'frame count': job.stop - job.start + 1,
+ const playerModel = new PlayerModel(taskData, playerGeometry);
+ const playerController = new PlayerController(playerModel,
+ () => shapeCollectionModel.activeShape,
+ direction => shapeCollectionModel.find(direction),
+ Object.assign({}, playerGeometry, {
+ left: $('#playerFrame').offset().left,
+ top: $('#playerFrame').offset().top,
+ }));
+ new PlayerView(playerModel, playerController);
+
+
+ const aamModel = new AAMModel(shapeCollectionModel, (xtl, xbr, ytl, ybr) => {
+ playerModel.focus(xtl, xbr, ytl, ybr);
+ }, () => {
+ playerModel.fit();
+ });
+ const aamController = new AAMController(aamModel);
+ new AAMView(aamModel, aamController);
+
+ shapeCreatorModel.subscribe(shapeCollectionModel);
+ shapeGrouperModel.subscribe(shapeCollectionView);
+ shapeCollectionModel.subscribe(shapeGrouperModel);
+
+ $('#playerProgress').css('width', $('#player')['0'].clientWidth - 420);
+
+ const historyModel = new HistoryModel(playerModel);
+ const historyController = new HistoryController(historyModel);
+ new HistoryView(historyController, historyModel);
+
+ playerModel.subscribe(shapeCollectionModel);
+ playerModel.subscribe(shapeCollectionView);
+ playerModel.subscribe(shapeCreatorView);
+ playerModel.subscribe(shapeBufferView);
+ playerModel.subscribe(shapeGrouperView);
+ playerModel.subscribe(polyshapeEditorView);
+ playerModel.shift(window.cvat.search.get('frame') || 0, true);
+
+ const { shortkeys } = window.cvat.config;
+
+ setupHelpWindow(shortkeys);
+ setupSettingsWindow();
+ setupMenu(jobData, taskData, shapeCollectionModel,
+ annotationParser, aamModel, playerModel, historyModel);
+ setupFrameFilters();
+ setupShortkeys(shortkeys, {
+ aam: aamModel,
+ shapeCreator: shapeCreatorModel,
+ shapeMerger: shapeMergerModel,
+ shapeGrouper: shapeGrouperModel,
+ shapeBuffer: shapeBufferModel,
+ shapeEditor: polyshapeEditorModel,
+ });
+
+ $(window).on('click', (event) => {
+ Logger.updateUserActivityTimer();
+ if (event.target.classList.contains('modal') && !event.target.classList.contains('force-modal')) {
+ event.target.classList.add('hidden');
+ }
+ });
+
+ const totalStat = shapeCollectionModel.collectStatistic()[1];
+ loadJobEvent.addValues({
+ 'track count': totalStat.boxes.annotation + totalStat.boxes.interpolation
+ + totalStat.polygons.annotation + totalStat.polygons.interpolation
+ + totalStat.polylines.annotation + totalStat.polylines.interpolation
+ + totalStat.points.annotation + totalStat.points.interpolation,
+ 'frame count': window.cvat.player.frames.stop - window.cvat.player.frames.start + 1,
'object count': totalStat.total,
'box count': totalStat.boxes.annotation + totalStat.boxes.interpolation,
'polygon count': totalStat.polygons.annotation + totalStat.polygons.interpolation,
'polyline count': totalStat.polylines.annotation + totalStat.polylines.interpolation,
'points count': totalStat.points.annotation + totalStat.points.interpolation,
});
+ loadJobEvent.close();
+
+ $('#player').on('click', (e) => {
+ if (e.target.tagName.toLowerCase() !== 'input') {
+ blurAllElements();
+ }
+ });
+}
- const exportedData = shapeCollectionModel.export();
- shapeCollectionModel.updateExportedState();
- const annotationLogs = Logger.getLogs();
- const data = {
- annotation: JSON.stringify(exportedData),
- logs: JSON.stringify(annotationLogs.export()),
- };
+function callAnnotationUI(jid) {
+ function onError(errorData) {
+ $('body').empty();
+ const message = `Can not build CVAT annotation UI. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ showMessage(message);
+ }
- saveButton.prop('disabled', true);
- saveButton.text('Saving..');
-
- saveJobRequest(job.jobid, data, () => {
- // success
- shapeCollectionModel.confirmExportedState();
- saveButton.text('Success!');
- setTimeout(() => {
- saveButton.prop('disabled', false);
- saveButton.text('Save Work');
- }, 3000);
- }, (response) => {
- // error
- saveButton.prop('disabled', false);
- saveButton.text('Save Work');
- let message = `Impossible to save job. Errors was occured. Status: ${response.status}`;
- showMessage(message + ' ' + 'Please immediately report the problem to support team');
- throw Error(message);
- });
+ initLogger(jid);
+
+ const loadJobEvent = Logger.addContinuedEvent(Logger.EventType.loadJob);
+ $.get(`/api/v1/jobs/${jid}`).done((jobData) => {
+ $.when(
+ $.get(`/api/v1/tasks/${jobData.task_id}`),
+ $.get(`/api/v1/tasks/${jobData.task_id}/frames/meta`),
+ $.get(`/api/v1/jobs/${jid}/annotations`),
+ ).then((taskData, imageMetaData, annotationData) => {
+ $('#loadingOverlay').remove();
+ setTimeout(() => {
+ buildAnnotationUI(jobData, taskData[0],
+ imageMetaData[0], annotationData[0], loadJobEvent);
+ });
+ }).fail(onError);
+ }).fail(onError);
}
-function blurAllElements() {
- document.activeElement.blur();
+function copyToClipboard(text) {
+ const tempInput = $('');
+ $('body').append(tempInput);
+ tempInput.prop('value', text).select();
+ document.execCommand('copy');
+ tempInput.remove();
+}
+
+
+function drawBoxSize(boxScene, textScene, box) {
+ const clientBox = window.cvat.translate.box.canvasToClient(boxScene.node, box);
+ const text = `${box.width.toFixed(1)}x${box.height.toFixed(1)}`;
+ const obj = this && this.textUI && this.rm ? this : {
+ textUI: textScene.text('').font({
+ weight: 'bolder',
+ }).fill('white'),
+
+ rm() {
+ if (this.textUI) {
+ this.textUI.remove();
+ }
+ },
+ };
+
+ const textPoint = window.cvat.translate.point.clientToCanvas(textScene.node,
+ clientBox.x, clientBox.y);
+
+ obj.textUI.clear().plain(text);
+ obj.textUI.addClass('shapeText');
+ obj.textUI.move(textPoint.x, textPoint.y);
+
+ return obj;
}
diff --git a/cvat/apps/engine/static/engine/js/attributeAnnotationMode.js b/cvat/apps/engine/static/engine/js/attributeAnnotationMode.js
index f091bb01a72b..ece1d78b9c66 100644
--- a/cvat/apps/engine/static/engine/js/attributeAnnotationMode.js
+++ b/cvat/apps/engine/static/engine/js/attributeAnnotationMode.js
@@ -14,8 +14,6 @@
SVG:false
*/
-"use strict";
-
const AAMUndefinedKeyword = '__undefined__';
class AAMModel extends Listener {
@@ -31,40 +29,27 @@ class AAMModel extends Listener {
this._currentShapes = [];
this._attrNumberByLabel = {};
this._helps = {};
- for (let labelId in window.cvat.labelsInfo.labels()) {
- let labelAttributes = window.cvat.labelsInfo.labelAttributes(labelId);
- if (Object.keys(labelAttributes).length) {
- this._attrNumberByLabel[labelId] = {
- current: 0,
- end: Object.keys(labelAttributes).length
- };
-
- for (let attrId in labelAttributes) {
- this._helps[attrId] = {
- title: `${window.cvat.labelsInfo.labels()[labelId]}, ${window.cvat.labelsInfo.attributes()[attrId]}`,
- help: getHelp(attrId),
- };
- }
- }
- }
function getHelp(attrId) {
- let attrInfo = window.cvat.labelsInfo.attrInfo(attrId);
- let help = [];
+ const attrInfo = window.cvat.labelsInfo.attrInfo(attrId);
+ const help = [];
switch (attrInfo.type) {
case 'checkbox':
- help.push('0 - ' + attrInfo.values[0]);
- help.push('1 - ' + !attrInfo.values[0]);
+ help.push(`0 - ${attrInfo.values[0]}`);
+ help.push(`1 - ${!attrInfo.values[0]}`);
break;
default:
- for (let idx = 0; idx < attrInfo.values.length; idx ++) {
- if (idx > 9) break;
- if (attrInfo.values[0] === AAMUndefinedKeyword) {
- if (!idx) continue;
- help.push(idx - 1 + ' - ' + attrInfo.values[idx]);
+ for (let idx = 0; idx < attrInfo.values.length; idx += 1) {
+ if (idx > 9) {
+ break;
}
- else {
- help.push(idx + ' - ' + attrInfo.values[idx]);
+ if (attrInfo.values[0] === AAMUndefinedKeyword) {
+ if (!idx) {
+ continue;
+ }
+ help.push(`${idx - 1} - ${attrInfo.values[idx]}`);
+ } else {
+ help.push(`${idx} - ${attrInfo.values[idx]}`);
}
}
}
@@ -72,35 +57,56 @@ class AAMModel extends Listener {
return help;
}
+ const labels = window.cvat.labelsInfo.labels();
+ for (const labelId in labels) {
+ if (Object.prototype.hasOwnProperty.call(labels, labelId)) {
+ const labelAttributes = window.cvat.labelsInfo.labelAttributes(labelId);
+ if (Object.keys(labelAttributes).length) {
+ this._attrNumberByLabel[labelId] = {
+ current: 0,
+ end: Object.keys(labelAttributes).length,
+ };
+
+ for (const attrId in labelAttributes) {
+ if (Object.prototype.hasOwnProperty.call(labelAttributes, attrId)) {
+ this._helps[attrId] = {
+ title: `${window.cvat.labelsInfo.labels()[labelId]}, ${window.cvat.labelsInfo.attributes()[attrId]}`,
+ help: getHelp(attrId),
+ };
+ }
+ }
+ }
+ }
+ }
+
shapeCollection.subscribe(this);
}
_bbRect(pos) {
if ('points' in pos) {
- let points = PolyShapeModel.convertStringToNumberArray(pos.points);
+ const points = PolyShapeModel.convertStringToNumberArray(pos.points);
let xtl = Number.MAX_SAFE_INTEGER;
let ytl = Number.MAX_SAFE_INTEGER;
let xbr = Number.MIN_SAFE_INTEGER;
let ybr = Number.MIN_SAFE_INTEGER;
- for (let point of points) {
+ for (const point of points) {
xtl = Math.min(xtl, point.x);
ytl = Math.min(ytl, point.y);
xbr = Math.max(xbr, point.x);
ybr = Math.max(ybr, point.y);
}
- return [xtl, ytl, xbr, ybr];
- }
- else {
- return [pos.xtl, pos.ytl, pos.xbr, pos.ybr];
+ return [xtl, ytl, xbr, ybr];
}
+ return [pos.xtl, pos.ytl, pos.xbr, pos.ybr];
}
_updateCollection() {
this._currentShapes = [];
- for (let shape of this._shapeCollection.currentShapes) {
- let labelAttributes = window.cvat.labelsInfo.labelAttributes(shape.model.label);
- if (Object.keys(labelAttributes).length && !shape.model.removed && !shape.interpolation.position.outside) {
+ for (const shape of this._shapeCollection.currentShapes) {
+ const labelAttributes = window.cvat.labelsInfo.labelAttributes(shape.model.label);
+ if (Object.keys(labelAttributes).length
+ && !shape.model.removed && !shape.interpolation.position.outside) {
this._currentShapes.push({
model: shape.model,
interpolation: shape.model.interpolate(window.cvat.player.frames.current),
@@ -111,8 +117,7 @@ class AAMModel extends Listener {
if (this._currentShapes.length) {
this._activeIdx = 0;
this._active = this._currentShapes[0].model;
- }
- else {
+ } else {
this._activeIdx = null;
this._active = null;
}
@@ -124,15 +129,16 @@ class AAMModel extends Listener {
_activate() {
if (this._activeAAM && this._active) {
- let label = this._active.label;
- let attrId = +this._attrIdByIdx(label, this._attrNumberByLabel[label].current);
+ const { label } = this._active;
+ const attrId = +this._attrIdByIdx(label, this._attrNumberByLabel[label].current);
- let [xtl, ytl, xbr, ybr] = this._bbRect(this._currentShapes[this._activeIdx].interpolation.position);
- this._focus(xtl - this._margin, xbr + this._margin, ytl - this._margin, ybr + this._margin);
+ const [xtl, ytl, xbr, ybr] = this._bbRect(this._currentShapes[this._activeIdx]
+ .interpolation.position);
+ this._focus(xtl - this._margin, xbr + this._margin,
+ ytl - this._margin, ybr + this._margin);
this.notify();
this._active.activeAttribute = attrId;
- }
- else {
+ } else {
this.notify();
}
}
@@ -170,8 +176,7 @@ class AAMModel extends Listener {
switchAAMMode() {
if (this._activeAAM) {
this._disable();
- }
- else {
+ } else {
this._enable();
}
}
@@ -184,14 +189,13 @@ class AAMModel extends Listener {
this._deactivate();
if (Math.sign(direction) < 0) {
// next
- this._activeIdx ++;
+ this._activeIdx += 1;
if (this._activeIdx >= this._currentShapes.length) {
this._activeIdx = 0;
}
- }
- else {
+ } else {
// prev
- this._activeIdx --;
+ this._activeIdx -= 1;
if (this._activeIdx < 0) {
this._activeIdx = this._currentShapes.length - 1;
}
@@ -206,7 +210,7 @@ class AAMModel extends Listener {
return;
}
- let curAttr = this._attrNumberByLabel[this._active.label];
+ const curAttr = this._attrNumberByLabel[this._active.label];
if (curAttr.end < 2) {
return;
@@ -214,14 +218,13 @@ class AAMModel extends Listener {
if (Math.sign(direction) > 0) {
// next
- curAttr.current ++;
+ curAttr.current += 1;
if (curAttr.current >= curAttr.end) {
curAttr.current = 0;
}
- }
- else {
+ } else {
// prev
- curAttr.current --;
+ curAttr.current -= 1;
if (curAttr.current < 0) {
curAttr.current = curAttr.end - 1;
}
@@ -233,10 +236,10 @@ class AAMModel extends Listener {
if (!this._activeAAM || !this._active) {
return;
}
- let label = this._active.label;
- let frame = window.cvat.player.frames.current;
- let attrId = this._attrIdByIdx(label, this._attrNumberByLabel[label].current);
- let attrInfo = window.cvat.labelsInfo.attrInfo(attrId);
+ const { label } = this._active;
+ const frame = window.cvat.player.frames.current;
+ const attrId = this._attrIdByIdx(label, this._attrNumberByLabel[label].current);
+ const attrInfo = window.cvat.labelsInfo.attrInfo(attrId);
if (key >= attrInfo.values.length) {
if (attrInfo.type === 'checkbox' && key < 2) {
this._active.updateAttribute(frame, attrId, !attrInfo.values[0]);
@@ -247,7 +250,7 @@ class AAMModel extends Listener {
if (key >= attrInfo.values.length - 1) {
return;
}
- key ++;
+ key += 1;
}
this._active.updateAttribute(frame, attrId, attrInfo.values[key]);
}
@@ -262,13 +265,11 @@ class AAMModel extends Listener {
generateHelps() {
if (this._active) {
- let label = this._active.label;
- let attrId = +this._attrIdByIdx(label, this._attrNumberByLabel[label].current);
+ const { label } = this._active;
+ const attrId = +this._attrIdByIdx(label, this._attrNumberByLabel[label].current);
return [this._helps[attrId].title, this._helps[attrId].help, `${this._activeIdx + 1}/${this._currentShapes.length}`];
}
- else {
- return ['No Shapes Found', '', '0/0'];
- }
+ return ['No Shapes Found', '', '0/0'];
}
get activeAAM() {
@@ -285,60 +286,57 @@ class AAMModel extends Listener {
}
-
class AAMController {
constructor(aamModel) {
this._model = aamModel;
- setupAAMShortkeys.call(this);
-
function setupAAMShortkeys() {
- let switchAAMHandler = Logger.shortkeyLogDecorator(function() {
+ const switchAAMHandler = Logger.shortkeyLogDecorator(() => {
this._model.switchAAMMode();
- }.bind(this));
+ });
- let nextAttributeHandler = Logger.shortkeyLogDecorator(function(e) {
+ const nextAttributeHandler = Logger.shortkeyLogDecorator((e) => {
this._model.moveAttr(1);
e.preventDefault();
- }.bind(this));
+ });
- let prevAttributeHandler = Logger.shortkeyLogDecorator(function(e) {
+ const prevAttributeHandler = Logger.shortkeyLogDecorator((e) => {
this._model.moveAttr(-1);
e.preventDefault();
- }.bind(this));
+ });
- let nextShapeHandler = Logger.shortkeyLogDecorator(function(e) {
+ const nextShapeHandler = Logger.shortkeyLogDecorator((e) => {
this._model.moveShape(1);
e.preventDefault();
- }.bind(this));
+ });
- let prevShapeHandler = Logger.shortkeyLogDecorator(function(e) {
+ const prevShapeHandler = Logger.shortkeyLogDecorator((e) => {
this._model.moveShape(-1);
e.preventDefault();
- }.bind(this));
+ });
- let selectAttributeHandler = Logger.shortkeyLogDecorator(function(e) {
+ const selectAttributeHandler = Logger.shortkeyLogDecorator((e) => {
let key = e.keyCode;
if (key >= 48 && key <= 57) {
- key -= 48; // 0 and 9
- }
- else if (key >= 96 && key <= 105) {
+ key -= 48; // 0 and 9
+ } else if (key >= 96 && key <= 105) {
key -= 96; // num 0 and 9
- }
- else {
+ } else {
return;
}
this._model.setupAttributeValue(key);
- }.bind(this));
-
- let shortkeys = window.cvat.config.shortkeys;
- Mousetrap.bind(shortkeys["switch_aam_mode"].value, switchAAMHandler, 'keydown');
- Mousetrap.bind(shortkeys["aam_next_attribute"].value, nextAttributeHandler, 'keydown');
- Mousetrap.bind(shortkeys["aam_prev_attribute"].value, prevAttributeHandler, 'keydown');
- Mousetrap.bind(shortkeys["aam_next_shape"].value, nextShapeHandler, 'keydown');
- Mousetrap.bind(shortkeys["aam_prev_shape"].value, prevShapeHandler, 'keydown');
- Mousetrap.bind(shortkeys["select_i_attribute"].value, selectAttributeHandler, 'keydown');
+ });
+
+ const { shortkeys } = window.cvat.config;
+ Mousetrap.bind(shortkeys.switch_aam_mode.value, switchAAMHandler, 'keydown');
+ Mousetrap.bind(shortkeys.aam_next_attribute.value, nextAttributeHandler, 'keydown');
+ Mousetrap.bind(shortkeys.aam_prev_attribute.value, prevAttributeHandler, 'keydown');
+ Mousetrap.bind(shortkeys.aam_next_shape.value, nextShapeHandler, 'keydown');
+ Mousetrap.bind(shortkeys.aam_prev_shape.value, prevShapeHandler, 'keydown');
+ Mousetrap.bind(shortkeys.select_i_attribute.value, selectAttributeHandler, 'keydown');
}
+
+ setupAAMShortkeys.call(this);
}
setMargin(value) {
@@ -359,15 +357,15 @@ class AAMView {
this._controller = aamController;
this._zoomMargin.on('change', (e) => {
- let value = +e.target.value;
+ const value = +e.target.value;
this._controller.setMargin(value);
}).trigger('change');
aamModel.subscribe(this);
}
_setupAAMView(active, type, pos) {
- let oldRect = $('#outsideRect');
- let oldMask = $('#outsideMask');
+ const oldRect = $('#outsideRect');
+ const oldMask = $('#outsideMask');
if (active) {
if (oldRect.length) {
@@ -375,45 +373,47 @@ class AAMView {
oldMask.remove();
}
- let size = window.cvat.translate.box.actualToCanvas({
+ const size = window.cvat.translate.box.actualToCanvas({
x: 0,
y: 0,
width: window.cvat.player.geometry.frameWidth,
- height: window.cvat.player.geometry.frameHeight
+ height: window.cvat.player.geometry.frameHeight,
});
- let excludeField = this._frameContent.rect(size.width, size.height).move(size.x, size.y).fill('#666');
+ const excludeField = this._frameContent.rect(size.width, size.height).move(size.x, size.y).fill('#666');
let includeField = null;
if (type === 'box') {
pos = window.cvat.translate.box.actualToCanvas(pos);
- includeField = this._frameContent.rect(pos.xbr - pos.xtl, pos.ybr - pos.ytl).move(pos.xtl, pos.ytl);
- }
- else {
+ includeField = this._frameContent.rect(pos.xbr - pos.xtl,
+ pos.ybr - pos.ytl).move(pos.xtl, pos.ytl);
+ } else {
pos.points = window.cvat.translate.points.actualToCanvas(pos.points);
includeField = this._frameContent.polygon(pos.points);
}
- this._frameContent.mask().add(excludeField).add(includeField).fill('black').attr('id', 'outsideMask');
- this._frameContent.rect(size.width, size.height).move(size.x, size.y).attr({
- mask: 'url(#outsideMask)',
- id: 'outsideRect'
- });
+ this._frameContent.mask().add(excludeField)
+ .add(includeField).fill('black')
+ .attr('id', 'outsideMask');
+ this._frameContent.rect(size.width, size.height)
+ .move(size.x, size.y).attr({
+ mask: 'url(#outsideMask)',
+ id: 'outsideRect',
+ });
- let content = $(this._frameContent.node);
- let texts = content.find('.shapeText');
- for (let text of texts) {
+ const content = $(this._frameContent.node);
+ const texts = content.find('.shapeText');
+ for (const text of texts) {
content.append(text);
}
- }
- else {
+ } else {
oldRect.remove();
oldMask.remove();
}
}
onAAMUpdate(aam) {
- this._setupAAMView(aam.active ? true : false,
+ this._setupAAMView(Boolean(aam.active),
aam.active ? aam.active.type.split('_')[1] : '',
aam.active ? aam.active.interpolate(window.cvat.player.frames.current).position : 0);
@@ -423,20 +423,17 @@ class AAMView {
this._aamMenu.removeClass('hidden');
}
- let [title, help, counter] = aam.generateHelps();
+ const [title, help, counter] = aam.generateHelps();
this._aamHelpContainer.empty();
this._aamCounter.text(counter);
this._aamTitle.text(title);
- for (let helpRow of help) {
+ for (const helpRow of help) {
$(` `).insertAfter($('#dashboardBugTrackerInput').parent().parent());
- repositorySyncButton.on("click", () => {
- function badResponse(message) {
- try {
- showMessage(message);
- throw Error(message);
- }
- finally {
- window.cvat.git.updateState();
- }
- }
- gitLabelMessage.css("color", "#cccc00").text("Synchronization..");
- gitLabelStatus.css("color", "#cccc00").text("\u25cc");
- repositorySyncButton.attr("disabled", true);
+ DashboardView.registerDecorator('createTask', (taskData, next, onFault) => {
+ const taskMessage = $('#dashboardCreateTaskMessage');
- let tid = gitWindow.attr("current_tid");
- $.get(`/git/repository/push/${tid}`).done((data) => {
- setTimeout(timeoutCallback, 1000);
+ const path = $(`#${createURLInputTextId}`).prop('value').replace(/\s/g, '');
+ const lfs = $(`#${lfsCheckboxId}`).prop('checked');
- function timeoutCallback() {
- $.get(`/git/repository/check/${data.rq_id}`).done((data) => {
- if (["finished", "failed", "unknown"].indexOf(data.status) != -1) {
- if (data.status === "failed") {
- let message = data.error;
- badResponse(message);
- }
- else if (data.status === "unknown") {
- let message = `Request for pushing returned status "${data.status}".`;
- badResponse(message);
- }
- else {
- window.cvat.git.updateState();
+ if (path.length) {
+ taskMessage.css('color', 'blue');
+ taskMessage.text('Git repository is being cloned..');
+
+ $.ajax({
+ url: `/git/repository/create/${taskData.id}`,
+ type: 'POST',
+ data: JSON.stringify({
+ path,
+ lfs,
+ tid: taskData.id,
+ }),
+ contentType: 'application/json',
+ }).done((rqData) => {
+ function checkCallback() {
+ $.ajax({
+ url: `/git/repository/check/${rqData.rq_id}`,
+ type: 'GET',
+ }).done((statusData) => {
+ if (['queued', 'started'].includes(statusData.status)) {
+ setTimeout(checkCallback, 1000);
+ } else if (statusData.status === 'finished') {
+ taskMessage.css('color', 'blue');
+ taskMessage.text('Git repository has been cloned');
+ next();
+ } else if (statusData.status === 'failed') {
+ let message = 'Repository status check failed. ';
+ if (statusData.stderr) {
+ message += statusData.stderr;
+ }
+
+ taskMessage.css('color', 'red');
+ taskMessage.text(message);
+ onFault();
+ } else {
+ const message = `Repository status check returned the status "${statusData.status}"`;
+ taskMessage.css('color', 'red');
+ taskMessage.text(message);
+ onFault();
}
- }
- else {
- setTimeout(timeoutCallback, 1000);
- }
- }).fail((data) => {
- let message = `Error was occured during pushing an repos entry. ` +
- `Code: ${data.status}, text: ${data.responseText || data.statusText}`;
- badResponse(message);
- });
- }
- }).fail((data) => {
- let message = `Error was occured during pushing an repos entry. ` +
- `Code: ${data.status}, text: ${data.responseText || data.statusText}`;
- badResponse(message);
- });
+ }).fail((errorData) => {
+                const message = `Cannot send a request to clone the repository. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ taskMessage.css('color', 'red');
+ taskMessage.text(message);
+ onFault();
+ });
+ }
+
+ setTimeout(checkCallback, 1000);
+ }).fail((errorData) => {
+                const message = `Cannot send a request to clone the repository. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ taskMessage.css('color', 'red');
+ taskMessage.text(message);
+ onFault();
+ });
+ } else {
+ next();
+ }
});
});
diff --git a/cvat/apps/git/urls.py b/cvat/apps/git/urls.py
index 7a168290083b..317ddf82ed56 100644
--- a/cvat/apps/git/urls.py
+++ b/cvat/apps/git/urls.py
@@ -8,6 +8,7 @@
urlpatterns = [
+ path('create/', views.create),
path('get/', views.get_repository),
path('push/', views.push_repository),
path('check/', views.check_process),
diff --git a/cvat/apps/git/views.py b/cvat/apps/git/views.py
index 5cc6e23baf0b..770f0678f3e9 100644
--- a/cvat/apps/git/views.py
+++ b/cvat/apps/git/views.py
@@ -12,6 +12,7 @@
import cvat.apps.git.git as CVATGit
import django_rq
+import json
@login_required
def check_process(request, rq_id):
@@ -21,11 +22,11 @@ def check_process(request, rq_id):
if rq_job is not None:
if rq_job.is_queued or rq_job.is_started:
- return JsonResponse({"status": "processing"})
+ return JsonResponse({"status": rq_job.get_status()})
elif rq_job.is_finished:
- return JsonResponse({"status": "finished"})
+ return JsonResponse({"status": rq_job.get_status()})
else:
- return JsonResponse({"status": "failed", "error": rq_job.exc_info})
+ return JsonResponse({"status": rq_job.get_status(), "stderr": rq_job.exc_info})
else:
return JsonResponse({"status": "unknown"})
except Exception as ex:
@@ -33,6 +34,26 @@ def check_process(request, rq_id):
return HttpResponseBadRequest(str(ex))
+@login_required
+@permission_required(perm=['engine.task.create'],
+ fn=objectgetter(models.Task, 'tid'), raise_exception=True)
+def create(request, tid):
+ try:
+ slogger.task[tid].info("create repository request")
+
+ body = json.loads(request.body.decode('utf-8'))
+ path = body["path"]
+ lfs = body["lfs"]
+ rq_id = "git.create.{}".format(tid)
+ queue = django_rq.get_queue("default")
+
+ queue.enqueue_call(func = CVATGit.initial_create, args = (tid, path, lfs, request.user), job_id = rq_id)
+ return JsonResponse({ "rq_id": rq_id })
+ except Exception as ex:
+        slogger.glob.error("error occurred during initial repository cloning request with rq id {}".format(rq_id), exc_info=True)
+ return HttpResponseBadRequest(str(ex))
+
+
@login_required
@permission_required(perm=['engine.task.access'],
fn=objectgetter(models.Task, 'tid'), raise_exception=True)
diff --git a/cvat/apps/log_viewer/__init__.py b/cvat/apps/log_viewer/__init__.py
index 3c7cca70670d..4c96c2ab030f 100644
--- a/cvat/apps/log_viewer/__init__.py
+++ b/cvat/apps/log_viewer/__init__.py
@@ -4,4 +4,4 @@
from cvat.settings.base import JS_3RDPARTY
-JS_3RDPARTY['dashboard'] = JS_3RDPARTY.get('dashboard', []) + ['log_viewer/js/shortcuts.js']
+JS_3RDPARTY['dashboard'] = JS_3RDPARTY.get('dashboard', []) + ['log_viewer/js/dashboardPlugin.js']
diff --git a/cvat/apps/log_viewer/static/log_viewer/js/dashboardPlugin.js b/cvat/apps/log_viewer/static/log_viewer/js/dashboardPlugin.js
new file mode 100644
index 000000000000..e40e279283d4
--- /dev/null
+++ b/cvat/apps/log_viewer/static/log_viewer/js/dashboardPlugin.js
@@ -0,0 +1,11 @@
+/*
+ * Copyright (C) 2018 Intel Corporation
+ *
+ * SPDX-License-Identifier: MIT
+ */
+
+window.addEventListener('DOMContentLoaded', () => {
+ $('').on('click', () => {
+ window.open('/analytics/app/kibana');
+ }).appendTo('#dashboardManageButtons');
+});
diff --git a/cvat/apps/log_viewer/static/log_viewer/js/shortcuts.js b/cvat/apps/log_viewer/static/log_viewer/js/shortcuts.js
deleted file mode 100644
index 880178ca7d03..000000000000
--- a/cvat/apps/log_viewer/static/log_viewer/js/shortcuts.js
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright (C) 2018 Intel Corporation
- *
- * SPDX-License-Identifier: MIT
- */
-
-/* global
- Mousetrap:false
-*/
-
-Mousetrap.bind(window.cvat.config.shortkeys["open_analytics"].value, function() {
- window.open("/analytics/app/kibana");
-
- return false;
-});
\ No newline at end of file
diff --git a/cvat/apps/reid/reid.py b/cvat/apps/reid/reid.py
index 915359b3e11a..59fce1cbfe1b 100644
--- a/cvat/apps/reid/reid.py
+++ b/cvat/apps/reid/reid.py
@@ -82,10 +82,10 @@ def __del__(self):
def __boxes_are_compatible(self, cur_box, next_box):
- cur_c_x = (cur_box["xtl"] + cur_box["xbr"]) / 2
- cur_c_y = (cur_box["ytl"] + cur_box["ybr"]) / 2
- next_c_x = (next_box["xtl"] + next_box["xbr"]) / 2
- next_c_y = (next_box["ytl"] + next_box["ybr"]) / 2
+ cur_c_x = (cur_box["points"][0] + cur_box["points"][2]) / 2
+ cur_c_y = (cur_box["points"][1] + cur_box["points"][3]) / 2
+ next_c_x = (next_box["points"][0] + next_box["points"][2]) / 2
+ next_c_y = (next_box["points"][1] + next_box["points"][3]) / 2
compatible_distance = euclidean([cur_c_x, cur_c_y], [next_c_x, next_c_y]) <= self.__max_distance
compatible_label = cur_box["label_id"] == next_box["label_id"]
return compatible_distance and compatible_label and "path_id" not in next_box
@@ -123,8 +123,8 @@ def _int(number, upper):
cur_width = cur_image.shape[1]
cur_height = cur_image.shape[0]
cur_xtl, cur_xbr, cur_ytl, cur_ybr = (
- _int(cur_box["xtl"], cur_width), _int(cur_box["xbr"], cur_width),
- _int(cur_box["ytl"], cur_height), _int(cur_box["ybr"], cur_height)
+ _int(cur_box["points"][0], cur_width), _int(cur_box["points"][2], cur_width),
+ _int(cur_box["points"][1], cur_height), _int(cur_box["points"][3], cur_height)
)
for col, next_box in enumerate(next_boxes):
@@ -132,8 +132,8 @@ def _int(number, upper):
next_width = next_image.shape[1]
next_height = next_image.shape[0]
next_xtl, next_xbr, next_ytl, next_ybr = (
- _int(next_box["xtl"], next_width), _int(next_box["xbr"], next_width),
- _int(next_box["ytl"], next_height), _int(next_box["ybr"], next_height)
+ _int(next_box["points"][0], next_width), _int(next_box["points"][2], next_width),
+ _int(next_box["points"][1], next_height), _int(next_box["points"][3], next_height)
)
if not self.__boxes_are_compatible(cur_box, next_box):
@@ -149,7 +149,7 @@ def _int(number, upper):
def __apply_matching(self):
frames = sorted(list(self.__frame_boxes.keys()))
job = rq.get_current_job()
- box_paths = {}
+ box_tracks = {}
for idx, (cur_frame, next_frame) in enumerate(list(zip(frames[:-1], frames[1:]))):
job.refresh()
@@ -164,8 +164,8 @@ def __apply_matching(self):
for box in cur_boxes:
if "path_id" not in box:
- path_id = len(box_paths)
- box_paths[path_id] = [box]
+ path_id = len(box_tracks)
+ box_tracks[path_id] = [box]
box["path_id"] = path_id
if not (len(cur_boxes) and len(next_boxes)):
@@ -180,38 +180,39 @@ def __apply_matching(self):
cur_box = cur_boxes[cur_idx]
next_box = next_boxes[next_idxs[idx]]
next_box["path_id"] = cur_box["path_id"]
- box_paths[cur_box["path_id"]].append(next_box)
+ box_tracks[cur_box["path_id"]].append(next_box)
for box in self.__frame_boxes[frames[-1]]:
if "path_id" not in box:
- path_id = len(box_paths)
+ path_id = len(box_tracks)
box["path_id"] = path_id
- box_paths[path_id] = [box]
+ box_tracks[path_id] = [box]
- return box_paths
+ return box_tracks
def run(self):
- box_paths = self.__apply_matching()
+ box_tracks = self.__apply_matching()
output = []
# ReID process has been canceled
- if box_paths is None:
+ if box_tracks is None:
return
- for path_id in box_paths:
+ for path_id in box_tracks:
output.append({
- "label_id": box_paths[path_id][0]["label_id"],
- "group_id": 0,
+ "label_id": box_tracks[path_id][0]["label_id"],
+ "group": None,
"attributes": [],
- "frame": box_paths[path_id][0]["frame"],
- "shapes": box_paths[path_id]
+ "frame": box_tracks[path_id][0]["frame"],
+ "shapes": box_tracks[path_id]
})
for box in output[-1]["shapes"]:
- del box["id"]
+ if "id" in box:
+ del box["id"]
del box["path_id"]
- del box["group_id"]
+ del box["group"]
del box["label_id"]
box["outside"] = False
box["attributes"] = []
diff --git a/cvat/apps/reid/static/reid/js/enginePlugin.js b/cvat/apps/reid/static/reid/js/enginePlugin.js
index 6bf14377cf5a..28fabb82319c 100644
--- a/cvat/apps/reid/static/reid/js/enginePlugin.js
+++ b/cvat/apps/reid/static/reid/js/enginePlugin.js
@@ -8,97 +8,96 @@
document.addEventListener('DOMContentLoaded', () => {
- function run(overlay, cancelButton, thresholdInput, distanceInput) {
+ async function run(overlay, cancelButton, thresholdInput, distanceInput) {
const collection = window.cvat.data.get();
const data = {
threshold: +thresholdInput.prop('value'),
maxDistance: +distanceInput.prop('value'),
- boxes: collection.boxes,
+ boxes: collection.shapes.filter(el => el.type === 'rectangle'),
};
overlay.removeClass('hidden');
cancelButton.prop('disabled', true);
- $.ajax({
- url: `reid/start/job/${window.cvat.job.id}`,
- type: 'POST',
- data: JSON.stringify(data),
- contentType: 'application/json',
- success: () => {
- function checkCallback() {
- $.ajax({
- url: `/reid/check/${window.cvat.job.id}`,
- type: 'GET',
- success: (jobData) => {
- if (jobData.progress) {
- cancelButton.text(`Cancel ReID Merge (${jobData.progress.toString().slice(0, 4)}%)`);
- }
-
- if (['queued', 'started'].includes(jobData.status)) {
- setTimeout(checkCallback, 1000);
- } else {
- overlay.addClass('hidden');
-
- if (jobData.status === 'finished') {
- if (jobData.result) {
- collection.boxes = [];
- collection.box_paths = collection.box_paths
- .concat(JSON.parse(jobData.result));
- window.cvat.data.clear();
- window.cvat.data.set(collection);
- showMessage('ReID merge has done.');
- } else {
- showMessage('ReID merge been canceled.');
- }
- } else if (jobData.status === 'failed') {
- const message = `ReID merge has fallen. Error: '${jobData.stderr}'`;
- showMessage(message);
- } else {
- let message = `Check request returned "${jobData.status}" status.`;
- if (jobData.stderr) {
- message += ` Error: ${jobData.stderr}`;
- }
- showMessage(message);
- }
- }
- },
- error: (errorData) => {
- overlay.addClass('hidden');
- const message = `Can not check ReID merge. Code: ${errorData.status}. Message: ${errorData.responseText || errorData.statusText}`;
- showMessage(message);
- },
- });
- }
- setTimeout(checkCallback, 1000);
- },
- error: (errorData) => {
+ async function checkCallback() {
+ let jobData = null;
+ try {
+ jobData = await $.get(`/reid/check/${window.cvat.job.id}`);
+ } catch (errorData) {
overlay.addClass('hidden');
- const message = `Can not start ReID merge. Code: ${errorData.status}. Message: ${errorData.responseText || errorData.statusText}`;
+ const message = `Can not check ReID merge. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
showMessage(message);
- },
- complete: () => {
- cancelButton.prop('disabled', false);
- },
- });
- }
+ }
- function cancel(overlay, cancelButton) {
- cancelButton.prop('disabled', true);
- $.ajax({
- url: `/reid/cancel/${window.cvat.job.id}`,
- type: 'GET',
- success: () => {
+ if (jobData.progress) {
+ cancelButton.text(`Cancel ReID Merge (${jobData.progress.toString().slice(0, 4)}%)`);
+ }
+
+ if (['queued', 'started'].includes(jobData.status)) {
+ setTimeout(checkCallback, 1000);
+ } else {
overlay.addClass('hidden');
- cancelButton.text('Cancel ReID Merge (0%)');
- },
- error: (errorData) => {
- const message = `Can not cancel ReID process. Code: ${errorData.status}. Message: ${errorData.responseText || errorData.statusText}`;
- showMessage(message);
- },
- complete: () => {
- cancelButton.prop('disabled', false);
+
+ if (jobData.status === 'finished') {
+ if (jobData.result) {
+ const result = JSON.parse(jobData.result);
+ collection.shapes = collection.shapes
+ .filter(el => el.type !== 'rectangle');
+ collection.tracks = collection.tracks
+ .concat(result);
+
+ window.cvat.data.clear();
+ window.cvat.data.set(collection);
+
+                    showMessage('ReID merge has finished.');
+ } else {
+                    showMessage('ReID merge has been canceled.');
+ }
+ } else if (jobData.status === 'failed') {
+                const message = `ReID merge has failed. Error: '${jobData.stderr}'`;
+ showMessage(message);
+ } else {
+ let message = `Check request returned "${jobData.status}" status.`;
+ if (jobData.stderr) {
+ message += ` Error: ${jobData.stderr}`;
+ }
+ showMessage(message);
+ }
}
- });
+ }
+
+ try {
+ await $.ajax({
+ url: `/reid/start/job/${window.cvat.job.id}`,
+ type: 'POST',
+ data: JSON.stringify(data),
+ contentType: 'application/json',
+ });
+
+ setTimeout(checkCallback, 1000);
+ } catch (errorData) {
+ overlay.addClass('hidden');
+ const message = `Can not start ReID merge. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ showMessage(message);
+ } finally {
+ cancelButton.prop('disabled', false);
+ }
+ }
+
+ async function cancel(overlay, cancelButton) {
+ cancelButton.prop('disabled', true);
+ try {
+ await $.get(`/reid/cancel/${window.cvat.job.id}`);
+ overlay.addClass('hidden');
+ cancelButton.text('Cancel ReID Merge (0%)');
+ } catch (errorData) {
+ const message = `Can not cancel ReID process. Code: ${errorData.status}. Message: ${errorData.responseText || errorData.statusText}`;
+ showMessage(message);
+ } finally {
+ cancelButton.prop('disabled', false);
+ }
}
const buttonsUI = $('#engineMenuButtons');
@@ -121,13 +120,13 @@ document.addEventListener('DOMContentLoaded', () => {
-
-
-
-
-
-
+
+
+
{% endblock %}
diff --git a/cvat/apps/dashboard/templates/dashboard/task.html b/cvat/apps/dashboard/templates/dashboard/task.html
deleted file mode 100644
index 073fca8fadba..000000000000
--- a/cvat/apps/dashboard/templates/dashboard/task.html
+++ /dev/null
@@ -1,38 +0,0 @@
-
-
+
-
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
diff --git a/cvat/apps/dashboard/tests.py b/cvat/apps/dashboard/tests.py
deleted file mode 100644
index ba48a2a3d51c..000000000000
--- a/cvat/apps/dashboard/tests.py
+++ /dev/null
@@ -1,7 +0,0 @@
-
-# Copyright (C) 2018 Intel Corporation
-#
-# SPDX-License-Identifier: MIT
-
-# Create your tests here.
-
diff --git a/cvat/apps/dashboard/urls.py b/cvat/apps/dashboard/urls.py
index 3f732f0e3ed5..d05317f7901c 100644
--- a/cvat/apps/dashboard/urls.py
+++ b/cvat/apps/dashboard/urls.py
@@ -7,7 +7,7 @@
from . import views
urlpatterns = [
- path('get_share_nodes', views.JsTreeView),
path('', views.DashboardView),
+ path('meta', views.DashboardMeta),
]
diff --git a/cvat/apps/dashboard/views.py b/cvat/apps/dashboard/views.py
index 1b2f81dccc84..3ba6b102ba84 100644
--- a/cvat/apps/dashboard/views.py
+++ b/cvat/apps/dashboard/views.py
@@ -9,73 +9,22 @@
from django.conf import settings
from cvat.apps.authentication.decorators import login_required
-from cvat.apps.engine.models import Task as TaskModel, Job as JobModel
from cvat.settings.base import JS_3RDPARTY, CSS_3RDPARTY
import os
-def ScanNode(directory):
- if '..' in directory.split(os.path.sep):
- return HttpResponseBadRequest('Permission Denied')
-
- act_dir = os.path.normpath(settings.SHARE_ROOT + directory)
- result = []
-
- nodes = os.listdir(act_dir)
- files = filter(os.path.isfile, map(lambda f: os.path.join(act_dir, f), nodes))
- dirs = filter(os.path.isdir, map(lambda d: os.path.join(act_dir, d), nodes))
-
- for d in dirs:
- name = os.path.basename(d)
- children = len(os.listdir(d)) > 0
- node = {'id': directory + name + '/', 'text': name, 'children': children}
- result.append(node)
-
- for f in files:
- name = os.path.basename(f)
- node = {'id': directory + name, 'text': name, "icon" : "jstree-file"}
- result.append(node)
-
- return result
-
-@login_required
-def JsTreeView(request):
- node_id = None
- if 'id' in request.GET:
- node_id = request.GET['id']
-
- if node_id is None or node_id == '#':
- node_id = '/'
- response = [{"id": node_id, "text": node_id, "children": ScanNode(node_id)}]
- else:
- response = ScanNode(node_id)
-
- return JsonResponse(response, safe=False,
- json_dumps_params=dict(ensure_ascii=False))
-
-
@login_required
def DashboardView(request):
- query_name = request.GET['search'] if 'search' in request.GET else None
- query_job = int(request.GET['jid']) if 'jid' in request.GET and request.GET['jid'].isdigit() else None
- task_list = None
-
- if query_job is not None and JobModel.objects.filter(pk = query_job).exists():
- task_list = [JobModel.objects.select_related('segment__task').get(pk = query_job).segment.task]
- else:
- task_list = list(TaskModel.objects.prefetch_related('segment_set__job_set').order_by('-created_date').all())
- if query_name is not None:
- task_list = list(filter(lambda x: query_name.lower() in x.name.lower(), task_list))
-
- task_list = list(filter(lambda task: request.user.has_perm(
- 'engine.task.access', task), task_list))
-
return render(request, 'dashboard/dashboard.html', {
- 'data': task_list,
+ 'js_3rdparty': JS_3RDPARTY.get('dashboard', []),
+ 'css_3rdparty': CSS_3RDPARTY.get('dashboard', []),
+ })
+
+@login_required
+def DashboardMeta(request):
+ return JsonResponse({
'max_upload_size': settings.LOCAL_LOAD_MAX_FILES_SIZE,
'max_upload_count': settings.LOCAL_LOAD_MAX_FILES_COUNT,
'base_url': "{0}://{1}/".format(request.scheme, request.get_host()),
'share_path': os.getenv('CVAT_SHARE_URL', default=r'${cvat_root}/share'),
- 'js_3rdparty': JS_3RDPARTY.get('dashboard', []),
- 'css_3rdparty': CSS_3RDPARTY.get('dashboard', []),
- })
+ })
\ No newline at end of file
diff --git a/cvat/apps/dextr_segmentation/dextr.py b/cvat/apps/dextr_segmentation/dextr.py
index 35c9b64fa2c7..703c6d08398e 100644
--- a/cvat/apps/dextr_segmentation/dextr.py
+++ b/cvat/apps/dextr_segmentation/dextr.py
@@ -69,7 +69,6 @@ def handle(self, im_path, points):
input_dextr = np.concatenate((resized, heatmap[:, :, np.newaxis].astype(resized.dtype)), axis=2)
input_dextr = input_dextr.transpose((2,0,1))
- np.set_printoptions(threshold=np.nan)
pred = self._exec_network.infer(inputs={self._input_blob: input_dextr[np.newaxis, ...]})[self._output_blob][0, 0, :, :]
pred = cv2.resize(pred, tuple(reversed(numpy_cropped.shape[:2])), interpolation = cv2.INTER_CUBIC)
result = np.zeros(numpy_image.shape[:2])
diff --git a/cvat/apps/dextr_segmentation/views.py b/cvat/apps/dextr_segmentation/views.py
index de99caeedb5a..0a837b3abb49 100644
--- a/cvat/apps/dextr_segmentation/views.py
+++ b/cvat/apps/dextr_segmentation/views.py
@@ -8,7 +8,6 @@
from cvat.apps.engine.models import Job
from cvat.apps.engine.log import slogger
-from cvat.apps.engine.task import get_frame_path
from cvat.apps.dextr_segmentation.dextr import DEXTR_HANDLER
import django_rq
@@ -39,8 +38,8 @@ def create(request, jid):
slogger.job[jid].info("create dextr request for the JOB: {} ".format(jid)
+ "by the USER: {} on the FRAME: {}".format(username, frame))
- tid = Job.objects.select_related("segment__task").get(id=jid).segment.task.id
- im_path = os.path.realpath(get_frame_path(tid, frame))
+ db_task = Job.objects.select_related("segment__task").get(id=jid).segment.task
+ im_path = os.path.realpath(db_task.get_frame_path(frame))
queue = django_rq.get_queue(__RQ_QUEUE_NAME)
rq_id = "dextr.create/{}/{}".format(jid, username)
diff --git a/cvat/apps/documentation/__init__.py b/cvat/apps/documentation/__init__.py
index 6808b8600f35..743392981f00 100644
--- a/cvat/apps/documentation/__init__.py
+++ b/cvat/apps/documentation/__init__.py
@@ -5,5 +5,4 @@
from cvat.settings.base import JS_3RDPARTY
-JS_3RDPARTY['dashboard'] = JS_3RDPARTY.get('dashboard', []) + ['documentation/js/shortcuts.js']
-
+JS_3RDPARTY['dashboard'] = JS_3RDPARTY.get('dashboard', []) + ['documentation/js/dashboardPlugin.js']
diff --git a/cvat/apps/documentation/static/documentation/js/dashboardPlugin.js b/cvat/apps/documentation/static/documentation/js/dashboardPlugin.js
new file mode 100644
index 000000000000..27d7ed8d879f
--- /dev/null
+++ b/cvat/apps/documentation/static/documentation/js/dashboardPlugin.js
@@ -0,0 +1,11 @@
+/*
+ * Copyright (C) 2018 Intel Corporation
+ *
+ * SPDX-License-Identifier: MIT
+ */
+
+window.addEventListener('DOMContentLoaded', () => {
+ $('').on('click', () => {
+ window.open('/documentation/user_guide.html');
+ }).appendTo('#dashboardManageButtons');
+});
diff --git a/cvat/apps/documentation/static/documentation/js/shortcuts.js b/cvat/apps/documentation/static/documentation/js/shortcuts.js
deleted file mode 100644
index 5d6839a3ffeb..000000000000
--- a/cvat/apps/documentation/static/documentation/js/shortcuts.js
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright (C) 2018 Intel Corporation
- *
- * SPDX-License-Identifier: MIT
- */
-
-/* global
- Mousetrap:false
-*/
-
-Mousetrap.bind(window.cvat.config.shortkeys["open_help"].value, function() {
- window.open("/documentation/user_guide.html");
-
- return false;
-});
diff --git a/cvat/apps/documentation/user_guide.md b/cvat/apps/documentation/user_guide.md
index d6d60442f3d7..f40fb96004e5 100644
--- a/cvat/apps/documentation/user_guide.md
+++ b/cvat/apps/documentation/user_guide.md
@@ -61,8 +61,6 @@ There you can:
__Source__. To create huge tasks please use ``shared`` server directory (choose ``Share`` option in the dialog).
- __Flip images__. All selected files will be turned around 180.
-
__Z-Order__. Defines the order on drawn polygons. Check the box for enable layered displaying.
__Overlap Size__. Use this option to make overlapped segments. The option makes tracks continuous from one segment into another. Use it for interpolation mode. There are several use cases for the parameter:
@@ -116,7 +114,7 @@ Usage examples:
![](static/documentation/images/image082.jpg) ![](static/documentation/images/image081.jpg)
2. Create a new annotation:
-
+
- Choose right ``Shape`` (box etc.) and ``Label`` (was specified by you while creating the task) beforehand:
![](static/documentation/images/image080.jpg) ![](static/documentation/images/image083.jpg)
@@ -294,7 +292,7 @@ By clicking on the points of poly-shapes ``Remove`` option is available.
By clicking outside any shapes you can either copy ``Frame URL`` (link to present frame) or ``Job URL`` (link from address bar)
-![](static/documentation/images/image091.jpg)
+![](static/documentation/images/image091.jpg)
---
### Settings
@@ -574,7 +572,7 @@ When ``Shift`` isn't pressed, you can zoom in/out (on mouse wheel scroll) and mo
![](static/documentation/images/gif007.gif)
-Also you can set fixed number of points in the field "poly shape size", then drawing will be stopped automatically.
+Also you can set fixed number of points in the field "poly shape size", then drawing will be stopped automatically.
To enable dragging, right-click inside polygon and choose ``Enable Dragging``.
diff --git a/cvat/apps/engine/__init__.py b/cvat/apps/engine/__init__.py
index b66dde17a5cf..8c438336d433 100644
--- a/cvat/apps/engine/__init__.py
+++ b/cvat/apps/engine/__init__.py
@@ -1,3 +1,5 @@
# Copyright (C) 2018 Intel Corporation
#
# SPDX-License-Identifier: MIT
+
+default_app_config = 'cvat.apps.engine.apps.EngineConfig'
diff --git a/cvat/apps/engine/admin.py b/cvat/apps/engine/admin.py
index 9cc6599c71b4..55786f89e455 100644
--- a/cvat/apps/engine/admin.py
+++ b/cvat/apps/engine/admin.py
@@ -56,8 +56,7 @@ def has_module_permission(self, request):
class TaskAdmin(admin.ModelAdmin):
date_hierarchy = 'updated_date'
- readonly_fields = ('size', 'path', 'created_date', 'updated_date',
- 'overlap', 'flipped')
+ readonly_fields = ('size', 'created_date', 'updated_date', 'overlap', 'flipped')
list_display = ('name', 'mode', 'owner', 'assignee', 'created_date', 'updated_date')
search_fields = ('name', 'mode', 'owner__username', 'owner__first_name',
'owner__last_name', 'owner__email', 'assignee__username', 'assignee__first_name',
diff --git a/cvat/apps/engine/annotation.py b/cvat/apps/engine/annotation.py
index c9935549f273..ef1d0bcccc1c 100644
--- a/cvat/apps/engine/annotation.py
+++ b/cvat/apps/engine/annotation.py
@@ -1,521 +1,128 @@
-
# Copyright (C) 2018 Intel Corporation
#
# SPDX-License-Identifier: MIT
import os
import copy
+from enum import Enum
from django.utils import timezone
-from collections import OrderedDict
import numpy as np
from scipy.optimize import linear_sum_assignment
from collections import OrderedDict
-from distutils.util import strtobool
from xml.sax.saxutils import XMLGenerator
from abc import ABCMeta, abstractmethod
from PIL import Image
+from shapely import geometry
-import django_rq
from django.conf import settings
from django.db import transaction
from cvat.apps.profiler import silk_profile
from cvat.apps.engine.plugins import plugin_decorator
from . import models
-from .task import get_frame_path, get_image_meta_cache
+from .task import get_image_meta_cache
from .log import slogger
+from . import serializers
+
+class PatchAction(str, Enum):
+ CREATE = "create"
+ UPDATE = "update"
+ DELETE = "delete"
+
+ @classmethod
+ def values(cls):
+ return [item.value for item in cls]
-############################# Low Level server API
-
-FORMAT_XML = 1
-FORMAT_JSON = 2
-
-def dump(tid, data_format, scheme, host):
- """
- Dump annotation for the task in specified data format.
- """
- queue = django_rq.get_queue('default')
- queue.enqueue_call(func=_dump, args=(tid, data_format, scheme, host, OrderedDict()),
- job_id="annotation.dump/{}".format(tid))
-
-def check(tid):
- """
- Check that potentially long operation 'dump' is completed.
- Return the status as json/dictionary object.
- """
- queue = django_rq.get_queue('default')
- job = queue.fetch_job("annotation.dump/{}".format(tid))
- if job is None:
- response = {"state": "unknown"}
- elif job.is_failed:
- # FIXME: here we have potential race. In general job.exc_info is
- # initialized inside handler but the method can be called before
- # that. By a reason exc_info isn't initialized by RQ python.
- response = {
- "state": "error",
- "stderr": job.exc_info}
- elif job.is_finished:
- response = {"state": "created"}
- else:
- response = {"state": "started"}
-
- return response
+ def __str__(self):
+ return self.value
+@silk_profile(name="GET job data")
@transaction.atomic
-def get(jid):
- """
- Get annotations for the job.
- """
- db_job = models.Job.objects.select_for_update().get(id=jid)
- annotation = _AnnotationForJob(db_job)
+def get_job_data(pk, user):
+ annotation = JobAnnotation(pk, user)
annotation.init_from_db()
- return annotation.to_client()
+ return annotation.data
-@silk_profile(name="Save job")
-@plugin_decorator
+@silk_profile(name="POST job data")
@transaction.atomic
-def save_job(jid, data):
- """
- Save new annotations for the job.
- """
- slogger.job[jid].info("Enter save_job API: jid = {}".format(jid))
- db_job = models.Job.objects.select_related('segment__task') \
- .select_for_update().get(id=jid)
+def put_job_data(pk, user, data):
+ annotation = JobAnnotation(pk, user)
+ annotation.put(data)
- annotation = _AnnotationForJob(db_job)
- annotation.force_set_client_id(data['create'])
- client_ids = annotation.validate_data_from_client(data)
+ return annotation.data
- annotation.delete_from_db(data['delete'])
- annotation.save_to_db(data['create'])
- annotation.update_in_db(data['update'])
+@silk_profile(name="UPDATE job data")
+@plugin_decorator
+@transaction.atomic
+def patch_job_data(pk, user, data, action):
+ annotation = JobAnnotation(pk, user)
+ if action == PatchAction.CREATE:
+ annotation.create(data)
+ elif action == PatchAction.UPDATE:
+ annotation.update(data)
+ elif action == PatchAction.DELETE:
+ annotation.delete(data)
+
+ return annotation.data
+
+@silk_profile(name="DELETE job data")
+@transaction.atomic
+def delete_job_data(pk, user):
+ annotation = JobAnnotation(pk, user)
+ annotation.delete()
- updated = sum([ len(data["update"][key]) for key in data["update"] ])
- deleted = sum([ len(data["delete"][key]) for key in data["delete"] ])
- created = sum([ len(data["create"][key]) for key in data["create"] ])
+@silk_profile(name="GET task data")
+@transaction.atomic
+def get_task_data(pk, user):
+ annotation = TaskAnnotation(pk, user)
+ annotation.init_from_db()
- if updated or deleted or created:
- db_job.segment.task.updated_date = timezone.now()
- db_job.segment.task.save()
+ return annotation.data
- db_job.max_shape_id = max(db_job.max_shape_id, max(client_ids['create']) if client_ids['create'] else -1)
- db_job.save()
+@silk_profile(name="POST task data")
+@transaction.atomic
+def put_task_data(pk, user, data):
+ annotation = TaskAnnotation(pk, user)
+ annotation.put(data)
- slogger.job[jid].info("Leave save_job API: jid = {}".format(jid))
+ return annotation.data
-@silk_profile(name="Clear job")
+@silk_profile(name="UPDATE task data")
@transaction.atomic
-def clear_job(jid):
- """
- Clear annotations for the job.
- """
- slogger.job[jid].info("Enter clear_job API: jid = {}".format(jid))
- db_job = models.Job.objects.select_related('segment__task') \
- .select_for_update().get(id=jid)
-
- annotation = _AnnotationForJob(db_job)
- annotation.delete_all_shapes_from_db()
- annotation.delete_all_paths_from_db()
-
- db_job.segment.task.updated_date = timezone.now()
- db_job.segment.task.save()
- slogger.job[jid].info("Leave clear_job API: jid = {}".format(jid))
-
-# pylint: disable=unused-argument
-@silk_profile(name="Save task")
-def save_task(tid, data):
- """
- Save new annotations for the task.
- """
- slogger.task[tid].info("Enter save_task API: tid = {}".format(tid))
- db_task = models.Task.objects.get(id=tid)
- db_segments = list(db_task.segment_set.prefetch_related('job_set').all())
-
- splitted_data = {}
-
- for segment in db_segments:
- jid = segment.job_set.first().id
- start = segment.start_frame
- stop = segment.stop_frame
- splitted_data[jid] = {}
- for action in ['create', 'update', 'delete']:
- splitted_data[jid][action] = {
- "boxes": copy.deepcopy(list(filter(lambda x: start <= int(x['frame']) <= stop, data[action]['boxes']))),
- "polygons": copy.deepcopy(list(filter(lambda x: start <= int(x['frame']) <= stop, data[action]['polygons']))),
- "polylines": copy.deepcopy(list(filter(lambda x: start <= int(x['frame']) <= stop, data[action]['polylines']))),
- "points": copy.deepcopy(list(filter(lambda x: start <= int(x['frame']) <= stop, data[action]['points']))),
- "box_paths": copy.deepcopy(list(filter(lambda x: len(list(filter(lambda y: (start <= int(y['frame']) <= stop) and (not y['outside']), x['shapes']))), data[action]['box_paths']))),
- "polygon_paths": copy.deepcopy(list(filter(lambda x: len(list(filter(lambda y: (start <= int(y['frame']) <= stop) and (not y['outside']), x['shapes']))), data[action]['polygon_paths']))),
- "polyline_paths": copy.deepcopy(list(filter(lambda x: len(list(filter(lambda y: (start <= int(y['frame']) <= stop) and (not y['outside']), x['shapes']))), data[action]['polyline_paths']))),
- "points_paths": copy.deepcopy(list(filter(lambda x: len(list(filter(lambda y: (start <= int(y['frame']) <= stop) and (not y['outside']), x['shapes']))), data[action]['points_paths']))),
- }
+def patch_task_data(pk, user, data, action):
+ annotation = TaskAnnotation(pk, user)
+ if action == PatchAction.CREATE:
+ annotation.create(data)
+ elif action == PatchAction.UPDATE:
+ annotation.update(data)
+ elif action == PatchAction.DELETE:
+ annotation.delete(data)
+
+ return annotation.data
+
+@silk_profile(name="DELETE task data")
+@transaction.atomic
+def delete_task_data(pk, user):
+ annotation = TaskAnnotation(pk, user)
+ annotation.delete()
- for jid, _data in splitted_data.items():
- # if an item inside _data isn't empty need to call save_job
- isNonEmpty = False
- for action in ['create', 'update', 'delete']:
- for objects in _data[action].values():
- if objects:
- isNonEmpty = True
- break
-
- if isNonEmpty:
- save_job(jid, _data)
-
- slogger.task[tid].info("Leave save_task API: tid = {}".format(tid))
-
-
-# pylint: disable=unused-argument
-@silk_profile(name="Clear task")
-def clear_task(tid):
- """
- Clear annotations for the task.
- """
- slogger.task[tid].info("Enter clear_task API: tid = {}".format(tid))
- db_task = models.Task.objects.get(id=tid)
- db_segments = list(db_task.segment_set.prefetch_related('job_set').all())
-
- for db_segment in db_segments:
- for db_job in list(db_segment.job_set.all()):
- clear_job(db_job.id)
-
- slogger.task[tid].info("Leave clear_task API: tid = {}".format(tid))
-
-# pylint: disable=unused-argument
-def rq_handler(job, exc_type, exc_value, traceback):
- tid = job.id.split('/')[1]
- slogger.task[tid].error("dump annotation error was occured", exc_info=True)
-
-##################################################
-
-class _Label:
- def __init__(self, db_label):
- self.id = db_label.id
- self.name = db_label.name
-
-class _Attribute:
- def __init__(self, db_attr, value):
- self.id = db_attr.id
- self.name = db_attr.get_name()
- if db_attr.get_type() == 'checkbox':
- self.value = str(value).lower()
- else:
- self.value = str(value)
-
-class _BoundingBox:
- def __init__(self, x0, y0, x1, y1, frame, occluded, z_order, client_id=None, attributes=None):
- self.xtl = x0
- self.ytl = y0
- self.xbr = x1
- self.ybr = y1
- self.occluded = occluded
- self.z_order = z_order
- self.client_id = client_id
- self.frame = frame
- self.attributes = attributes if attributes else []
-
- def merge(self, box):
- # The occluded property and attributes cannot be merged. Let's keep
- # original attributes and occluded property of the self object.
- assert self.frame == box.frame
- self.xtl = (self.xtl + box.xtl) / 2
- self.ytl = (self.ytl + box.ytl) / 2
- self.xbr = (self.xbr + box.xbr) / 2
- self.ybr = (self.ybr + box.ybr) / 2
-
- def add_attribute(self, attr):
- self.attributes.append(attr)
-
-class _LabeledBox(_BoundingBox):
- def __init__(self, label, x0, y0, x1, y1, frame, group_id, occluded, z_order, client_id=None, attributes=None):
- super().__init__(x0, y0, x1, y1, frame, occluded, z_order, client_id, attributes)
- self.label = label
- self.group_id = group_id
-
-class _TrackedBox(_BoundingBox):
- def __init__(self, x0, y0, x1, y1, frame, occluded, z_order, outside, attributes=None):
- super().__init__(x0, y0, x1, y1, frame, occluded, z_order, None, attributes)
- self.outside = outside
-
-class _InterpolatedBox(_TrackedBox):
- def __init__(self, x0, y0, x1, y1, frame, occluded, z_order, outside, keyframe, attributes=None):
- super().__init__(x0, y0, x1, y1, frame, occluded, z_order, outside, attributes)
- self.keyframe = keyframe
-
-class _PolyShape:
- def __init__(self, points, frame, occluded, z_order, client_id=None, attributes=None):
- self.points = points
- self.frame = frame
- self.occluded = occluded
- self.z_order = z_order
- self.client_id=client_id
- self.attributes = attributes if attributes else []
-
- def add_attribute(self, attr):
- self.attributes.append(attr)
-
-class _LabeledPolyShape(_PolyShape):
- def __init__(self, label, points, frame, group_id, occluded, z_order, client_id=None, attributes=None):
- super().__init__(points, frame, occluded, z_order, client_id, attributes)
- self.label = label
- self.group_id = group_id
-
-class _TrackedPolyShape(_PolyShape):
- def __init__(self, points, frame, occluded, z_order, outside, attributes=None):
- super().__init__(points, frame, occluded, z_order, None, attributes)
- self.outside = outside
-
-class _InterpolatedPolyShape(_TrackedPolyShape):
- def __init__(self, points, frame, occluded, z_order, outside, keyframe, attributes=None):
- super().__init__(points, frame, occluded, z_order, outside, attributes)
- self.keyframe = keyframe
-
-class _BoxPath:
- def __init__(self, label, start_frame, stop_frame, group_id, boxes=None, client_id=None, attributes=None):
- self.label = label
- self.frame = start_frame
- self.stop_frame = stop_frame
- self.group_id = group_id
- self.boxes = boxes if boxes else []
- self.client_id = client_id
- self.attributes = attributes if attributes else []
- self._interpolated_boxes = []
- assert not self.boxes or self.boxes[-1].frame <= self.stop_frame
-
- def add_box(self, box):
- self.boxes.append(box)
-
- def get_interpolated_boxes(self):
- if not self._interpolated_boxes:
- self._init_interpolated_boxes()
-
- return self._interpolated_boxes
-
- def _init_interpolated_boxes(self):
- assert self.boxes[-1].frame <= self.stop_frame
-
- boxes = []
- stop_box = copy.copy(self.boxes[-1])
- stop_box.frame = self.stop_frame + 1
- attributes = {}
- for box0, box1 in zip(self.boxes, self.boxes[1:] + [stop_box]):
- assert box0.frame < box1.frame
-
- distance = float(box1.frame - box0.frame)
- delta_xtl = (box1.xtl - box0.xtl) / distance
- delta_ytl = (box1.ytl - box0.ytl) / distance
- delta_xbr = (box1.xbr - box0.xbr) / distance
- delta_ybr = (box1.ybr - box0.ybr) / distance
-
- # New box doesn't have all attributes (only first one does).
- # Thus it is necessary to propagate them.
- for attr in box0.attributes:
- attributes[attr.id] = attr
-
- for frame in range(box0.frame, box1.frame):
- off = frame - box0.frame
- xtl = box0.xtl + delta_xtl * off
- ytl = box0.ytl + delta_ytl * off
- xbr = box0.xbr + delta_xbr * off
- ybr = box0.ybr + delta_ybr * off
-
- box = _InterpolatedBox(xtl, ytl, xbr, ybr, frame, box0.occluded, box0.z_order,
- box0.outside, box0.frame == frame, list(attributes.values()))
- boxes.append(box)
-
- if box0.outside:
- break
-
- self._interpolated_boxes = boxes
-
- def merge(self, path):
- assert self.label.id == path.label.id
- boxes = {box.frame:box for box in self.boxes}
- for box in path.boxes:
- if box.frame in boxes:
- boxes[box.frame].merge(box)
- else:
- boxes[box.frame] = box
-
- self.frame = min(self.frame, path.frame)
- self.stop_frame = max(self.stop_frame, path.stop_frame)
- self.boxes = list(sorted(boxes.values(), key=lambda box: box.frame))
- self._interpolated_boxes = []
-
- def add_attribute(self, attr):
- self.attributes.append(attr)
-
-class _PolyPath:
- def __init__(self, label, start_frame, stop_frame, group_id, shapes=None, client_id=None, attributes=None):
- self.label = label
- self.frame = start_frame
- self.stop_frame = stop_frame
- self.group_id = group_id
- self.shapes = shapes if shapes else []
- self.client_id = client_id
- self.attributes = attributes if attributes else []
- self._interpolated_shapes = [] # ???
-
- def add_shape(self, shape):
- self.shapes.append(shape)
-
- def get_interpolated_shapes(self):
- if not self._interpolated_shapes:
- self._init_interpolated_shapes()
-
- return self._interpolated_shapes
-
- def _init_interpolated_shapes(self):
- assert self.shapes[-1].frame <= self.stop_frame
- self._interpolated_shapes = []
- shapes = {shape.frame: shape for shape in self.shapes}
- outside = False
- attributes = {}
- for frame in range(self.frame, self.stop_frame + 1):
- if frame in shapes:
- for attr in shapes[frame].attributes:
- attributes[attr.id] = attr
- shape = _InterpolatedPolyShape(shapes[frame].points, frame,
- shapes[frame].occluded, shapes[frame].z_order, shapes[frame].outside, True, list(attributes.values()))
- outside = shape.outside
- self._interpolated_shapes.append(shape)
- elif not outside:
- shape = _InterpolatedPolyShape(self._interpolated_shapes[-1].points, frame, False,
- 0, True, True, list(attributes.values()))
- outside = shape.outside
- self._interpolated_shapes.append(shape)
-
- def merge(self, path):
- pass
- def add_attribute(self, attr):
- self.attributes.append(attr)
+def dump_task_data(pk, user, file_path, scheme, host, query_params):
+ # For big tasks dump function may run for a long time and
+ # we dont need to acquire lock after _AnnotationForTask instance
+ # has been initialized from DB.
+ # But there is the bug with corrupted dump file in case 2 or more dump request received at the same time.
+ # https://github.com/opencv/cvat/issues/217
+ with transaction.atomic():
+ annotation = TaskAnnotation(pk, user)
+ annotation.init_from_db()
-class _Annotation:
- def __init__(self, start_frame, stop_frame):
- self.start_frame = start_frame
- self.stop_frame = stop_frame
- self.reset()
+ annotation.dump(file_path, scheme, host, query_params)
- def reset(self):
- self.boxes = []
- self.box_paths = []
- self.polygons = []
- self.polygon_paths = []
- self.polylines = []
- self.polyline_paths = []
- self.points = []
- self.points_paths = []
-
- def has_data(self):
- non_empty = False
- for attr in ['boxes', 'box_paths', 'polygons', 'polygon_paths',
- 'polylines', 'polyline_paths', 'points', 'points_paths']:
- non_empty |= bool(getattr(self, attr))
-
- return non_empty
-
- # Functions below used by dump functionality
- def to_boxes(self):
- boxes = []
- for path in self.box_paths:
- for box in path.get_interpolated_boxes():
- if not box.outside:
- box = _LabeledBox(
- label=path.label,
- x0=box.xtl, y0=box.ytl, x1=box.xbr, y1=box.ybr,
- frame=box.frame,
- group_id=path.group_id,
- occluded=box.occluded,
- z_order=box.z_order,
- attributes=box.attributes + path.attributes,
- )
- boxes.append(box)
-
- return self.boxes + boxes
-
- def _to_poly_shapes(self, iter_attr_name):
- shapes = []
- for path in getattr(self, iter_attr_name):
- for shape in path.get_interpolated_shapes():
- if not shape.outside:
- shape = _LabeledPolyShape(
- label=path.label,
- points=shape.points,
- frame=shape.frame,
- group_id=path.group_id,
- occluded=shape.occluded,
- z_order=shape.z_order,
- attributes=shape.attributes + path.attributes,
- )
- shapes.append(shape)
- return shapes
+######
- def to_polygons(self):
- polygons = self._to_poly_shapes('polygon_paths')
- return polygons + self.polygons
-
- def to_polylines(self):
- polylines = self._to_poly_shapes('polyline_paths')
- return polylines + self.polylines
-
- def to_points(self):
- points = self._to_poly_shapes('points_paths')
- return points + self.points
-
- def to_box_paths(self):
- paths = []
- for box in self.boxes:
- box0 = _InterpolatedBox(box.xtl, box.ytl, box.xbr, box.ybr, box.frame,
- box.occluded, box.z_order, False, True)
- box1 = copy.copy(box0)
- box1.outside = True
- box1.frame += 1
- path = _BoxPath(
- label=box.label,
- start_frame=box.frame,
- stop_frame=box.frame + 1,
- group_id=box.group_id,
- boxes=[box0, box1],
- attributes=box.attributes,
- client_id=box.client_id,
- )
- paths.append(path)
-
- return self.box_paths + paths
-
-
- def _to_poly_paths(self, iter_attr_name):
- paths = []
- for shape in getattr(self, iter_attr_name):
- shape0 = _InterpolatedPolyShape(shape.points, shape.frame, shape.occluded, shape.z_order, False, True)
- shape1 = copy.copy(shape0)
- shape1.outside = True
- shape1.frame += 1
- path = _PolyPath(
- label=shape.label,
- start_frame=shape.frame,
- stop_frame=shape.frame + 1,
- group_id=shape.group_id,
- shapes=[shape0, shape1],
- client_id=shape.client_id,
- attributes=shape.attributes,
- )
- paths.append(path)
-
- return paths
-
- def to_polygon_paths(self):
- return self._to_poly_paths('polygons') + self.polygon_paths
-
- def to_polyline_paths(self):
- return self._to_poly_paths('polylines') + self.polyline_paths
-
- def to_points_paths(self):
- return self._to_poly_paths('points') + self.points_paths
-
-def bulk_create(db_model, objects, flt_param = {}):
+def bulk_create(db_model, objects, flt_param):
if objects:
if flt_param:
if 'postgresql' in settings.DATABASES["default"]["ENGINE"]:
@@ -528,1012 +135,451 @@ def bulk_create(db_model, objects, flt_param = {}):
else:
return db_model.objects.bulk_create(objects)
-class _AnnotationForJob(_Annotation):
- def __init__(self, db_job):
- db_segment = db_job.segment
- super().__init__(db_segment.start_frame, db_segment.stop_frame)
+ return []
+
+def _merge_table_rows(rows, keys_for_merge, field_id):
+ """dot.notation access to dictionary attributes"""
+ from collections import OrderedDict
+ class dotdict(OrderedDict):
+ __getattr__ = OrderedDict.get
+ __setattr__ = OrderedDict.__setitem__
+ __delattr__ = OrderedDict.__delitem__
+ __eq__ = lambda self, other: self.id == other.id
+ __hash__ = lambda self: self.id
+
+ # It is necessary to keep a stable order of original rows
+ # (e.g. for tracked boxes). Otherwise prev_box.frame can be bigger
+ # than next_box.frame.
+ merged_rows = OrderedDict()
+
+ # Group all rows by field_id. In grouped rows replace fields in
+ # accordance with keys_for_merge structure.
+ for row in rows:
+ row_id = row[field_id]
+ if not row_id in merged_rows:
+ merged_rows[row_id] = dotdict(row)
+ for key in keys_for_merge:
+ merged_rows[row_id][key] = []
+
+ for key in keys_for_merge:
+ item = dotdict({v.split('__', 1)[-1]:row[v] for v in keys_for_merge[key]})
+ if item.id is not None:
+ merged_rows[row_id][key].append(item)
+
+ # Remove redundant keys from final objects
+ redundant_keys = [item for values in keys_for_merge.values() for item in values]
+ for i in merged_rows:
+ for j in redundant_keys:
+ del merged_rows[i][j]
+
+ return list(merged_rows.values())
+
+class JobAnnotation:
+ def __init__(self, pk, user):
+ self.user = user
+ self.db_job = models.Job.objects.select_related('segment__task') \
+ .select_for_update().get(id=pk)
+
+ db_segment = self.db_job.segment
+ self.start_frame = db_segment.start_frame
+ self.stop_frame = db_segment.stop_frame
+ self.reset()
# pylint: disable=bad-continuation
- self.db_job = db_job
- self.logger = slogger.job[db_job.id]
+ self.logger = slogger.job[self.db_job.id]
self.db_labels = {db_label.id:db_label
- for db_label in db_job.segment.task.label_set.all()}
+ for db_label in db_segment.task.label_set.all()}
self.db_attributes = {db_attr.id:db_attr
for db_attr in models.AttributeSpec.objects.filter(
- label__task__id=db_job.segment.task.id)}
-
- def _get_client_ids_from_db(self):
- client_ids = set()
-
- ids = list(self.db_job.objectpath_set.values_list('client_id', flat=True))
- client_ids.update(ids)
-
- for shape_type in ['polygons', 'polylines', 'points', 'boxes']:
- ids = list(self._get_shape_class(shape_type).objects.filter(
- job_id=self.db_job.id).values_list('client_id', flat=True))
- client_ids.update(ids)
-
- return client_ids
-
- def _merge_table_rows(self, rows, keys_for_merge, field_id):
- """dot.notation access to dictionary attributes"""
- class dotdict(OrderedDict):
- __getattr__ = OrderedDict.get
- __setattr__ = OrderedDict.__setitem__
- __delattr__ = OrderedDict.__delitem__
- __eq__ = lambda self, other: self.id == other.id
- __hash__ = lambda self: self.id
-
- # It is necessary to keep a stable order of original rows
- # (e.g. for tracked boxes). Otherwise prev_box.frame can be bigger
- # than next_box.frame.
- merged_rows = OrderedDict()
-
- # Group all rows by field_id. In grouped rows replace fields in
- # accordance with keys_for_merge structure.
- for row in rows:
- row_id = row[field_id]
- if not row_id in merged_rows:
- merged_rows[row_id] = dotdict(row)
- for key in keys_for_merge:
- merged_rows[row_id][key] = []
-
- for key in keys_for_merge:
- item = dotdict({v.split('__', 1)[-1]:row[v] for v in keys_for_merge[key]})
- if item.id:
- merged_rows[row_id][key].append(item)
+ label__task__id=db_segment.task.id)}
- # Remove redundant keys from final objects
- redundant_keys = [item for values in keys_for_merge.values() for item in values]
- for i in merged_rows:
- for j in redundant_keys:
- del merged_rows[i][j]
-
- return list(merged_rows.values())
-
- @staticmethod
- def _clamp(value, min_value, max_value):
- return max(min(value, max_value), min_value)
-
- def _clamp_box(self, xtl, ytl, xbr, ybr, im_size):
- xtl = self._clamp(xtl, 0, im_size['width'])
- xbr = self._clamp(xbr, 0, im_size['width'])
- ytl = self._clamp(ytl, 0, im_size['height'])
- ybr = self._clamp(ybr, 0, im_size['height'])
-
- return xtl, ytl, xbr, ybr
-
- def _clamp_poly(self, points, im_size):
- verified = []
- points = points.split(' ')
- for p in points:
- p = p.split(',')
- verified.append('{},{}'.format(
- self._clamp(float(p[0]), 0, im_size['width']),
- self._clamp(float(p[1]), 0, im_size['height'])
- ))
+ def reset(self):
+ self.data = {
+ "version": 0,
+ "tags": [],
+ "shapes": [],
+ "tracks": []
+ }
- return ' '.join(verified)
+ def _save_tracks_to_db(self, tracks):
+ db_tracks = []
+ db_track_attrvals = []
+ db_shapes = []
+ db_shape_attrvals = []
+
+ for track in tracks:
+ track_attributes = track.pop("attributes", [])
+ shapes = track.pop("shapes")
+ db_track = models.LabeledTrack(job=self.db_job, **track)
+ if db_track.label_id not in self.db_labels:
+ raise AttributeError("label_id `{}` is invalid".format(db_track.label_id))
+
+ for attr in track_attributes:
+ db_attrval = models.LabeledTrackAttributeVal(**attr)
+ if db_attrval.spec_id not in self.db_attributes:
+ raise AttributeError("spec_id `{}` is invalid".format(db_attrval.spec_id))
+ db_attrval.track_id = len(db_tracks)
+ db_track_attrvals.append(db_attrval)
- def init_from_db(self):
- def get_values(shape_type):
- if shape_type == 'polygons':
- return [
- ('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
- 'labeledpolygonattributeval__value', 'labeledpolygonattributeval__spec_id',
- 'labeledpolygonattributeval__id'), {
- 'attributes': [
- 'labeledpolygonattributeval__value',
- 'labeledpolygonattributeval__spec_id',
- 'labeledpolygonattributeval__id'
- ]
- }, 'labeledpolygonattributeval_set'
- ]
- elif shape_type == 'polylines':
- return [
- ('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
- 'labeledpolylineattributeval__value', 'labeledpolylineattributeval__spec_id',
- 'labeledpolylineattributeval__id'), {
- 'attributes': [
- 'labeledpolylineattributeval__value',
- 'labeledpolylineattributeval__spec_id',
- 'labeledpolylineattributeval__id'
- ]
- }, 'labeledpolylineattributeval_set'
- ]
- elif shape_type == 'boxes':
- return [
- ('id', 'frame', 'xtl', 'ytl', 'xbr', 'ybr', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
- 'labeledboxattributeval__value', 'labeledboxattributeval__spec_id',
- 'labeledboxattributeval__id'), {
- 'attributes': [
- 'labeledboxattributeval__value',
- 'labeledboxattributeval__spec_id',
- 'labeledboxattributeval__id'
- ]
- }, 'labeledboxattributeval_set'
- ]
- elif shape_type == 'points':
- return [
- ('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
- 'labeledpointsattributeval__value', 'labeledpointsattributeval__spec_id',
- 'labeledpointsattributeval__id'), {
- 'attributes': [
- 'labeledpointsattributeval__value',
- 'labeledpointsattributeval__spec_id',
- 'labeledpointsattributeval__id'
- ]
- }, 'labeledpointsattributeval_set'
- ]
+ for shape in shapes:
+ shape_attributes = shape.pop("attributes", [])
+ # FIXME: need to clamp points (be sure that all of them inside the image)
+ # Should we check here or implement a validator?
+ db_shape = models.TrackedShape(**shape)
+ db_shape.track_id = len(db_tracks)
+
+ for attr in shape_attributes:
+ db_attrval = models.TrackedShapeAttributeVal(**attr)
+ if db_attrval.spec_id not in self.db_attributes:
+ raise AttributeError("spec_id `{}` is invalid".format(db_attrval.spec_id))
+ db_attrval.shape_id = len(db_shapes)
+ db_shape_attrvals.append(db_attrval)
+ db_shapes.append(db_shape)
+ shape["attributes"] = shape_attributes
+
+ db_tracks.append(db_track)
+ track["attributes"] = track_attributes
+ track["shapes"] = shapes
+
+ db_tracks = bulk_create(
+ db_model=models.LabeledTrack,
+ objects=db_tracks,
+ flt_param={"job_id": self.db_job.id}
+ )
+
+ for db_attrval in db_track_attrvals:
+ db_attrval.track_id = db_tracks[db_attrval.track_id].id
+ bulk_create(
+ db_model=models.LabeledTrackAttributeVal,
+ objects=db_track_attrvals,
+ flt_param={}
+ )
+
+ for db_shape in db_shapes:
+ db_shape.track_id = db_tracks[db_shape.track_id].id
+
+ db_shapes = bulk_create(
+ db_model=models.TrackedShape,
+ objects=db_shapes,
+ flt_param={"track__job_id": self.db_job.id}
+ )
+
+ for db_attrval in db_shape_attrvals:
+ db_attrval.shape_id = db_shapes[db_attrval.shape_id].id
+
+ bulk_create(
+ db_model=models.TrackedShapeAttributeVal,
+ objects=db_shape_attrvals,
+ flt_param={}
+ )
+
+ shape_idx = 0
+ for track, db_track in zip(tracks, db_tracks):
+ track["id"] = db_track.id
+ for shape in track["shapes"]:
+ shape["id"] = db_shapes[shape_idx].id
+ shape_idx += 1
+
+ self.data["tracks"] = tracks
+
+ def _save_shapes_to_db(self, shapes):
+ db_shapes = []
+ db_attrvals = []
+
+ for shape in shapes:
+ attributes = shape.pop("attributes", [])
+            # FIXME: need to clamp points (be sure that all of them are inside the image)
+ # Should we check here or implement a validator?
+ db_shape = models.LabeledShape(job=self.db_job, **shape)
+ if db_shape.label_id not in self.db_labels:
+ raise AttributeError("label_id `{}` is invalid".format(db_shape.label_id))
+
+ for attr in attributes:
+ db_attrval = models.LabeledShapeAttributeVal(**attr)
+ if db_attrval.spec_id not in self.db_attributes:
+ raise AttributeError("spec_id `{}` is invalid".format(db_attrval.spec_id))
+ db_attrval.shape_id = len(db_shapes)
+ db_attrvals.append(db_attrval)
+
+ db_shapes.append(db_shape)
+ shape["attributes"] = attributes
+
+ db_shapes = bulk_create(
+ db_model=models.LabeledShape,
+ objects=db_shapes,
+ flt_param={"job_id": self.db_job.id}
+ )
+
+ for db_attrval in db_attrvals:
+ db_attrval.shape_id = db_shapes[db_attrval.shape_id].id
+
+ bulk_create(
+ db_model=models.LabeledShapeAttributeVal,
+ objects=db_attrvals,
+ flt_param={}
+ )
+
+ for shape, db_shape in zip(shapes, db_shapes):
+ shape["id"] = db_shape.id
+
+ self.data["shapes"] = shapes
+
+ def _save_tags_to_db(self, tags):
+ db_tags = []
+ db_attrvals = []
+
+ for tag in tags:
+ attributes = tag.pop("attributes", [])
+ db_tag = models.LabeledImage(job=self.db_job, **tag)
+ if db_tag.label_id not in self.db_labels:
+ raise AttributeError("label_id `{}` is invalid".format(db_tag.label_id))
+
+ for attr in attributes:
+ db_attrval = models.LabeledImageAttributeVal(**attr)
+ if db_attrval.spec_id not in self.db_attributes:
+ raise AttributeError("spec_id `{}` is invalid".format(db_attrval.spec_id))
+ db_attrval.tag_id = len(db_tags)
+ db_attrvals.append(db_attrval)
+
+ db_tags.append(db_tag)
+ tag["attributes"] = attributes
+
+ db_tags = bulk_create(
+ db_model=models.LabeledImage,
+ objects=db_tags,
+ flt_param={"job_id": self.db_job.id}
+ )
+
+ for db_attrval in db_attrvals:
+ db_attrval.tag_id = db_tags[db_attrval.tag_id].id
+
+ bulk_create(
+ db_model=models.LabeledImageAttributeVal,
+ objects=db_attrvals,
+ flt_param={}
+ )
+
+ for tag, db_tag in zip(tags, db_tags):
+ tag["id"] = db_tag.id
+
+ self.data["tags"] = tags
+
+ def _commit(self):
+ db_prev_commit = self.db_job.commits.last()
+ db_curr_commit = models.JobCommit()
+ if db_prev_commit:
+ db_curr_commit.version = db_prev_commit.version + 1
+ else:
+ db_curr_commit.version = 1
+ db_curr_commit.job = self.db_job
+ db_curr_commit.message = "Changes: tags - {}; shapes - {}; tracks - {}".format(
+ len(self.data["tags"]), len(self.data["shapes"]), len(self.data["tracks"]))
+ db_curr_commit.save()
+ self.data["version"] = db_curr_commit.version
+
+ def _save_to_db(self, data):
self.reset()
- for shape_type in ['boxes', 'points', 'polygons', 'polylines']:
- (values, merge_keys, prefetch) = get_values(shape_type)
- db_shapes = list(self._get_shape_set(shape_type).prefetch_related(prefetch).
- values(*values).order_by('frame'))
- db_shapes = self._merge_table_rows(db_shapes, merge_keys, 'id')
- for db_shape in db_shapes:
- label = _Label(self.db_labels[db_shape.label_id])
- if shape_type == 'boxes':
- shape = _LabeledBox(label=label,
- x0=db_shape.xtl, y0=db_shape.ytl, x1=db_shape.xbr, y1=db_shape.ybr,
- frame=db_shape.frame,
- group_id=db_shape.group_id,
- occluded=db_shape.occluded,
- z_order=db_shape.z_order,
- client_id=db_shape.client_id,
- )
- else:
- shape = _LabeledPolyShape(
- label=label,
- points=db_shape.points,
- frame=db_shape.frame,
- group_id=db_shape.group_id,
- occluded=db_shape.occluded,
- z_order=db_shape.z_order,
- client_id=db_shape.client_id,
- )
- for db_attr in db_shape.attributes:
- if db_attr.id != None:
- spec = self.db_attributes[db_attr.spec_id]
- attr = _Attribute(spec, db_attr.value)
- shape.add_attribute(attr)
- getattr(self, shape_type).append(shape)
-
- db_paths = self.db_job.objectpath_set
- for shape in ['trackedpoints_set', 'trackedbox_set', 'trackedpolyline_set', 'trackedpolygon_set']:
- db_paths.prefetch_related(shape)
- for shape_attr in ['trackedpoints_set__trackedpointsattributeval_set', 'trackedbox_set__trackedboxattributeval_set',
- 'trackedpolygon_set__trackedpolygonattributeval_set', 'trackedpolyline_set__trackedpolylineattributeval_set']:
- db_paths.prefetch_related(shape_attr)
- db_paths.prefetch_related('objectpathattributeval_set')
- db_paths = list (db_paths.values('id', 'frame', 'group_id', 'shapes', 'client_id', 'objectpathattributeval__spec_id',
- 'objectpathattributeval__id', 'objectpathattributeval__value',
- 'trackedbox', 'trackedpolygon', 'trackedpolyline', 'trackedpoints',
- 'trackedbox__id', 'label_id', 'trackedbox__xtl', 'trackedbox__ytl',
- 'trackedbox__xbr', 'trackedbox__ybr', 'trackedbox__frame', 'trackedbox__occluded',
- 'trackedbox__z_order','trackedbox__outside', 'trackedbox__trackedboxattributeval__spec_id',
- 'trackedbox__trackedboxattributeval__value', 'trackedbox__trackedboxattributeval__id',
- 'trackedpolygon__id' ,'trackedpolygon__points', 'trackedpolygon__frame', 'trackedpolygon__occluded',
- 'trackedpolygon__z_order', 'trackedpolygon__outside', 'trackedpolygon__trackedpolygonattributeval__spec_id',
- 'trackedpolygon__trackedpolygonattributeval__value', 'trackedpolygon__trackedpolygonattributeval__id',
- 'trackedpolyline__id', 'trackedpolyline__points', 'trackedpolyline__frame', 'trackedpolyline__occluded',
- 'trackedpolyline__z_order', 'trackedpolyline__outside', 'trackedpolyline__trackedpolylineattributeval__spec_id',
- 'trackedpolyline__trackedpolylineattributeval__value', 'trackedpolyline__trackedpolylineattributeval__id',
- 'trackedpoints__id', 'trackedpoints__points', 'trackedpoints__frame', 'trackedpoints__occluded',
- 'trackedpoints__z_order', 'trackedpoints__outside', 'trackedpoints__trackedpointsattributeval__spec_id',
- 'trackedpoints__trackedpointsattributeval__value', 'trackedpoints__trackedpointsattributeval__id')
- .order_by('id', 'trackedbox__frame', 'trackedpolygon__frame', 'trackedpolyline__frame', 'trackedpoints__frame'))
-
- db_box_paths = list(filter(lambda path: path['shapes'] == 'boxes', db_paths ))
- db_polygon_paths = list(filter(lambda path: path['shapes'] == 'polygons', db_paths ))
- db_polyline_paths = list(filter(lambda path: path['shapes'] == 'polylines', db_paths ))
- db_points_paths = list(filter(lambda path: path['shapes'] == 'points', db_paths ))
-
- object_path_attr_merge_key = [
- 'objectpathattributeval__value',
- 'objectpathattributeval__spec_id',
- 'objectpathattributeval__id'
- ]
-
- db_box_paths = self._merge_table_rows(db_box_paths, {
- 'attributes': object_path_attr_merge_key,
- 'shapes': [
- 'trackedbox__id', 'trackedbox__xtl', 'trackedbox__ytl',
- 'trackedbox__xbr', 'trackedbox__ybr', 'trackedbox__frame',
- 'trackedbox__occluded', 'trackedbox__z_order', 'trackedbox__outside',
- 'trackedbox__trackedboxattributeval__value',
- 'trackedbox__trackedboxattributeval__spec_id',
- 'trackedbox__trackedboxattributeval__id'
- ],
- }, 'id')
-
- db_polygon_paths = self._merge_table_rows(db_polygon_paths, {
- 'attributes': object_path_attr_merge_key,
- 'shapes': [
- 'trackedpolygon__id', 'trackedpolygon__points', 'trackedpolygon__frame',
- 'trackedpolygon__occluded', 'trackedpolygon__z_order', 'trackedpolygon__outside',
- 'trackedpolygon__trackedpolygonattributeval__value',
- 'trackedpolygon__trackedpolygonattributeval__spec_id',
- 'trackedpolygon__trackedpolygonattributeval__id'
- ]
- }, 'id')
-
- db_polyline_paths = self._merge_table_rows(db_polyline_paths, {
- 'attributes': object_path_attr_merge_key,
- 'shapes': [
- 'trackedpolyline__id', 'trackedpolyline__points', 'trackedpolyline__frame',
- 'trackedpolyline__occluded', 'trackedpolyline__z_order', 'trackedpolyline__outside',
- 'trackedpolyline__trackedpolylineattributeval__value',
- 'trackedpolyline__trackedpolylineattributeval__spec_id',
- 'trackedpolyline__trackedpolylineattributeval__id'
- ],
- }, 'id')
-
- db_points_paths = self._merge_table_rows(db_points_paths, {
- 'attributes': object_path_attr_merge_key,
- 'shapes': [
- 'trackedpoints__id', 'trackedpoints__points', 'trackedpoints__frame',
- 'trackedpoints__occluded', 'trackedpoints__z_order', 'trackedpoints__outside',
- 'trackedpoints__trackedpointsattributeval__value',
- 'trackedpoints__trackedpointsattributeval__spec_id',
- 'trackedpoints__trackedpointsattributeval__id'
- ]
- }, 'id')
-
- for db_box_path in db_box_paths:
- db_box_path.attributes = list(set(db_box_path.attributes))
- db_box_path.shapes = self._merge_table_rows(db_box_path.shapes, {
- 'attributes': [
- 'trackedboxattributeval__value',
- 'trackedboxattributeval__spec_id',
- 'trackedboxattributeval__id'
- ]
- }, 'id')
-
- for db_polygon_path in db_polygon_paths:
- db_polygon_path.attributes = list(set(db_polygon_path.attributes))
- db_polygon_path.shapes = self._merge_table_rows(db_polygon_path.shapes, {
- 'attributes': [
- 'trackedpolygonattributeval__value',
- 'trackedpolygonattributeval__spec_id',
- 'trackedpolygonattributeval__id'
- ]
- }, 'id')
-
- for db_polyline_path in db_polyline_paths:
- db_polyline_path.attributes = list(set(db_polyline_path.attributes))
- db_polyline_path.shapes = self._merge_table_rows(db_polyline_path.shapes, {
- 'attributes': [
- 'trackedpolylineattributeval__value',
- 'trackedpolylineattributeval__spec_id',
- 'trackedpolylineattributeval__id'
- ]
- }, 'id')
-
- for db_points_path in db_points_paths:
- db_points_path.attributes = list(set(db_points_path.attributes))
- db_points_path.shapes = self._merge_table_rows(db_points_path.shapes, {
- 'attributes': [
- 'trackedpointsattributeval__value',
- 'trackedpointsattributeval__spec_id',
- 'trackedpointsattributeval__id'
+ self._save_tags_to_db(data["tags"])
+ self._save_shapes_to_db(data["shapes"])
+ self._save_tracks_to_db(data["tracks"])
+
+ return self.data["tags"] or self.data["shapes"] or self.data["tracks"]
+
+ def _create(self, data):
+ if self._save_to_db(data):
+ db_task = self.db_job.segment.task
+ db_task.updated_date = timezone.now()
+ db_task.save()
+ self.db_job.save()
+
+ def create(self, data):
+ self._create(data)
+ self._commit()
+
+ def put(self, data):
+ self._delete()
+ self._create(data)
+ self._commit()
+
+ def update(self, data):
+ self._delete(data)
+ self._create(data)
+ self._commit()
+
+ def _delete(self, data=None):
+ if data is None:
+ self.db_job.labeledimage_set.all().delete()
+ self.db_job.labeledshape_set.all().delete()
+ self.db_job.labeledtrack_set.all().delete()
+ else:
+ labeledimage_ids = [image["id"] for image in data["tags"]]
+ labeledshape_ids = [shape["id"] for shape in data["shapes"]]
+ labeledtrack_ids = [track["id"] for track in data["tracks"]]
+ labeledimage_set = self.db_job.labeledimage_set
+ labeledimage_set = labeledimage_set.filter(pk__in=labeledimage_ids)
+ labeledshape_set = self.db_job.labeledshape_set
+ labeledshape_set = labeledshape_set.filter(pk__in=labeledshape_ids)
+ labeledtrack_set = self.db_job.labeledtrack_set
+ labeledtrack_set = labeledtrack_set.filter(pk__in=labeledtrack_ids)
+
+ # It is not important for us that data had some "invalid" objects
+            # which were skipped (not actually deleted). The main idea is to
+ # say that all requested objects are absent in DB after the method.
+ self.data = data
+
+ labeledimage_set.delete()
+ labeledshape_set.delete()
+ labeledtrack_set.delete()
+
+ def delete(self, data=None):
+ self._delete(data)
+ self._commit()
+
+ def _init_tags_from_db(self):
+ db_tags = self.db_job.labeledimage_set.prefetch_related(
+ "label",
+ "labeledimageattributeval_set"
+ ).values(
+ 'id',
+ 'frame',
+ 'label_id',
+ 'group',
+ 'labeledimageattributeval__spec_id',
+ 'labeledimageattributeval__value',
+ 'labeledimageattributeval__id',
+ ).order_by('frame')
+
+ db_tags = _merge_table_rows(
+ rows=db_tags,
+ keys_for_merge={
+ "labeledimageattributeval_set": [
+ 'labeledimageattributeval__spec_id',
+ 'labeledimageattributeval__value',
+ 'labeledimageattributeval__id',
+ ],
+ },
+ field_id='id',
+ )
+ serializer = serializers.LabeledImageSerializer(db_tags, many=True)
+ self.data["tags"] = serializer.data
+
+ def _init_shapes_from_db(self):
+ db_shapes = self.db_job.labeledshape_set.prefetch_related(
+ "label",
+ "labeledshapeattributeval_set"
+ ).values(
+ 'id',
+ 'label_id',
+ 'type',
+ 'frame',
+ 'group',
+ 'occluded',
+ 'z_order',
+ 'points',
+ 'labeledshapeattributeval__spec_id',
+ 'labeledshapeattributeval__value',
+ 'labeledshapeattributeval__id',
+ ).order_by('frame')
+
+ db_shapes = _merge_table_rows(
+ rows=db_shapes,
+ keys_for_merge={
+ 'labeledshapeattributeval_set': [
+ 'labeledshapeattributeval__spec_id',
+ 'labeledshapeattributeval__value',
+ 'labeledshapeattributeval__id',
+ ],
+ },
+ field_id='id',
+ )
+
+ serializer = serializers.LabeledShapeSerializer(db_shapes, many=True)
+ self.data["shapes"] = serializer.data
+
+ def _init_tracks_from_db(self):
+ db_tracks = self.db_job.labeledtrack_set.prefetch_related(
+ "label",
+ "labeledtrackattributeval_set",
+ "trackedshape_set__trackedshapeattributeval_set"
+ ).values(
+ "id",
+ "frame",
+ "label_id",
+ "group",
+ "labeledtrackattributeval__spec_id",
+ "labeledtrackattributeval__value",
+ "labeledtrackattributeval__id",
+ "trackedshape__type",
+ "trackedshape__occluded",
+ "trackedshape__z_order",
+ "trackedshape__points",
+ "trackedshape__id",
+ "trackedshape__frame",
+ "trackedshape__outside",
+ "trackedshape__trackedshapeattributeval__spec_id",
+ "trackedshape__trackedshapeattributeval__value",
+ "trackedshape__trackedshapeattributeval__id",
+ ).order_by('id', 'trackedshape__frame')
+
+ db_tracks = _merge_table_rows(
+ rows=db_tracks,
+ keys_for_merge={
+ "labeledtrackattributeval_set": [
+ "labeledtrackattributeval__spec_id",
+ "labeledtrackattributeval__value",
+ "labeledtrackattributeval__id",
+ ],
+ "trackedshape_set":[
+ "trackedshape__type",
+ "trackedshape__occluded",
+ "trackedshape__z_order",
+ "trackedshape__points",
+ "trackedshape__id",
+ "trackedshape__frame",
+ "trackedshape__outside",
+ "trackedshape__trackedshapeattributeval__spec_id",
+ "trackedshape__trackedshapeattributeval__value",
+ "trackedshape__trackedshapeattributeval__id",
+ ],
+ },
+ field_id="id",
+ )
+
+ for db_track in db_tracks:
+ db_track["trackedshape_set"] = _merge_table_rows(db_track["trackedshape_set"], {
+ 'trackedshapeattributeval_set': [
+ 'trackedshapeattributeval__value',
+ 'trackedshapeattributeval__spec_id',
+ 'trackedshapeattributeval__id',
]
}, 'id')
- for db_path in db_box_paths:
- for db_shape in db_path.shapes:
- db_shape.attributes = list(set(db_shape.attributes))
- label = _Label(self.db_labels[db_path.label_id])
- path = _BoxPath(
- label=label,
- start_frame=db_path.frame,
- stop_frame=self.stop_frame,
- group_id=db_path.group_id,
- client_id=db_path.client_id,
- )
- for db_attr in db_path.attributes:
- spec = self.db_attributes[db_attr.spec_id]
- attr = _Attribute(spec, db_attr.value)
- path.add_attribute(attr)
-
- frame = -1
- for db_shape in db_path.shapes:
- box = _TrackedBox(
- x0=db_shape.xtl, y0=db_shape.ytl, x1=db_shape.xbr, y1=db_shape.ybr,
- frame=db_shape.frame,
- occluded=db_shape.occluded,
- z_order=db_shape.z_order,
- outside=db_shape.outside,
+            # A result table can consist of many equal rows for track/shape attributes
+            # We need to filter unique attributes manually
+ db_track["labeledtrackattributeval_set"] = list(set(db_track["labeledtrackattributeval_set"]))
+ for db_shape in db_track["trackedshape_set"]:
+ db_shape["trackedshapeattributeval_set"] = list(
+ set(db_shape["trackedshapeattributeval_set"])
)
- assert box.frame > frame
- frame = box.frame
-
- for db_attr in db_shape.attributes:
- spec = self.db_attributes[db_attr.spec_id]
- attr = _Attribute(spec, db_attr.value)
- box.add_attribute(attr)
- path.add_box(box)
-
- self.box_paths.append(path)
-
- for idx, paths_type in enumerate(['polygon_paths', 'polyline_paths', 'points_paths']):
- source = [db_polygon_paths, db_polyline_paths, db_points_paths][idx]
-
- for db_path in source:
- for db_shape in db_path.shapes:
- db_shape.attributes = list(set(db_shape.attributes))
- label = _Label(self.db_labels[db_path.label_id])
- path = _PolyPath(
- label=label,
- start_frame=db_path.frame,
- stop_frame= self.stop_frame,
- group_id=db_path.group_id,
- client_id=db_path.client_id,
- )
- for db_attr in db_path.attributes:
- spec = self.db_attributes[db_attr.spec_id]
- attr = _Attribute(spec, db_attr.value)
- path.add_attribute(attr)
-
- frame = -1
- for db_shape in db_path.shapes:
- shape = _TrackedPolyShape(
- points=db_shape.points,
- frame=db_shape.frame,
- occluded=db_shape.occluded,
- z_order=db_shape.z_order,
- outside=db_shape.outside,
- )
- assert shape.frame > frame
- frame = shape.frame
-
- for db_attr in db_shape.attributes:
- spec = self.db_attributes[db_attr.spec_id]
- attr = _Attribute(spec, db_attr.value)
- shape.add_attribute(attr)
- path.add_shape(shape)
-
- getattr(self, paths_type).append(path)
-
-
- def init_from_client(self, data):
- # All fields inside data should be converted to correct type explicitly.
- # We cannot trust that client will send 23 as integer. Here we also
- # accept "23".
- db_task = self.db_job.segment.task
- image_meta = get_image_meta_cache(db_task)
- self.reset()
-
- for box in data['boxes']:
- label = _Label(self.db_labels[int(box['label_id'])])
-
- frame_idx = int(box['frame']) if db_task.mode == 'annotation' else 0
- xtl, ytl, xbr, ybr = self._clamp_box(float(box['xtl']), float(box['ytl']),
- float(box['xbr']), float(box['ybr']),
- image_meta['original_size'][frame_idx])
-
- labeled_box = _LabeledBox(
- label=label,
- x0=xtl, y0=ytl, x1=xbr, y1=ybr,
- frame=int(box['frame']),
- group_id=int(box['group_id']),
- occluded=strtobool(str(box['occluded'])),
- z_order=int(box['z_order']),
- client_id=int(box['id']),
- )
-
- for attr in box['attributes']:
- spec = self.db_attributes[int(attr['id'])]
- attr = _Attribute(spec, str(attr['value']))
- labeled_box.add_attribute(attr)
-
- self.boxes.append(labeled_box)
-
- for poly_shape_type in ['points', 'polygons', 'polylines']:
- for poly_shape in data[poly_shape_type]:
- label = _Label(self.db_labels[int(poly_shape['label_id'])])
-
- frame_idx = int(poly_shape['frame']) if db_task.mode == 'annotation' else 0
- points = self._clamp_poly(poly_shape['points'], image_meta['original_size'][frame_idx])
- labeled_poly_shape = _LabeledPolyShape(
- label=label,
- points=points,
- frame=int(poly_shape['frame']),
- group_id=int(poly_shape['group_id']),
- occluded=poly_shape['occluded'],
- z_order=int(poly_shape['z_order']),
- client_id=int(poly_shape['id']),
- )
-
- for attr in poly_shape['attributes']:
- spec = self.db_attributes[int(attr['id'])]
- attr = _Attribute(spec, str(attr['value']))
- labeled_poly_shape.add_attribute(attr)
-
- getattr(self, poly_shape_type).append(labeled_poly_shape)
-
- for path in data['box_paths']:
- label = _Label(self.db_labels[int(path['label_id'])])
- boxes = []
- frame = -1
-
- has_boxes_on_prev_segm = False
- last_box_on_prev_segm = None
- has_box_on_start_frame = False
- for box in path['shapes']:
- if int(box['frame']) < self.start_frame:
- has_boxes_on_prev_segm = True
- if last_box_on_prev_segm is None or int(last_box_on_prev_segm["frame"]) < int(box["frame"]):
- last_box_on_prev_segm = box
- elif int(box['frame']) == self.start_frame:
- has_box_on_start_frame = True
- break
- if has_boxes_on_prev_segm and not has_box_on_start_frame:
- last_box_on_prev_segm["frame"] = self.start_frame
-
- for box in path['shapes']:
- if int(box['frame']) <= self.stop_frame and int(box['frame']) >= self.start_frame:
- frame_idx = int(box['frame']) if db_task.mode == 'annotation' else 0
- xtl, ytl, xbr, ybr = self._clamp_box(float(box['xtl']), float(box['ytl']),
- float(box['xbr']), float(box['ybr']), image_meta['original_size'][frame_idx])
- tracked_box = _TrackedBox(
- x0=xtl, y0=ytl, x1=xbr, y1=ybr,
- frame=int(box['frame']),
- occluded=strtobool(str(box['occluded'])),
- z_order=int(box['z_order']),
- outside=strtobool(str(box['outside'])),
- )
- assert tracked_box.frame > frame
- frame = tracked_box.frame
-
- for attr in box['attributes']:
- spec = self.db_attributes[int(attr['id'])]
- assert spec.is_mutable()
- attr = _Attribute(spec, str(attr['value']))
- tracked_box.add_attribute(attr)
-
- boxes.append(tracked_box)
- else:
- self.logger.error("init_from_client: ignore frame #%d " +
- "because it out of segment range [%d-%d]", int(box['frame']), self.start_frame, self.stop_frame)
-
- attributes = []
- for attr in path['attributes']:
- spec = self.db_attributes[int(attr['id'])]
- assert not spec.is_mutable()
- attr = _Attribute(spec, str(attr['value']))
- attributes.append(attr)
-
- assert frame <= self.stop_frame
- box_path = _BoxPath(label=label,
- start_frame=min(list(map(lambda box: box.frame, boxes))),
- stop_frame=self.stop_frame,
- group_id=int(path['group_id']),
- boxes=boxes,
- client_id=int(path['id']),
- attributes=attributes,
- )
- self.box_paths.append(box_path)
-
- for poly_path_type in ['points_paths', 'polygon_paths', 'polyline_paths']:
- for path in data[poly_path_type]:
- label = _Label(self.db_labels[int(path['label_id'])])
- poly_shapes = []
- frame = -1
-
- has_shapes_on_prev_segm = False
- last_shape_on_prev_segm = None
- has_shape_on_start_frame = False
- for poly_shape in path['shapes']:
- if int(poly_shape['frame']) < self.start_frame:
- has_shapes_on_prev_segm = True
- if last_shape_on_prev_segm is None or int(last_shape_on_prev_segm["frame"]) < (poly_shape["frame"]):
- last_shape_on_prev_segm = box
- elif int(poly_shape['frame']) == self.start_frame:
- has_shape_on_start_frame = True
- break
- if has_shapes_on_prev_segm and not has_shape_on_start_frame:
- last_shape_on_prev_segm["frame"] = self.start_frame
-
- for poly_shape in path['shapes']:
- if int(poly_shape['frame']) <= self.stop_frame and int(poly_shape['frame']) >= self.start_frame:
- frame_idx = int(poly_shape['frame']) if db_task.mode == 'annotation' else 0
- points = self._clamp_poly(poly_shape['points'], image_meta['original_size'][frame_idx])
- tracked_poly_shape = _TrackedPolyShape(
- points=points,
- frame=int(poly_shape['frame']),
- occluded=strtobool(str(poly_shape['occluded'])),
- z_order=int(poly_shape['z_order']),
- outside=strtobool(str(poly_shape['outside'])),
- )
- assert tracked_poly_shape.frame > frame
- frame = tracked_poly_shape.frame
-
- for attr in poly_shape['attributes']:
- spec = self.db_attributes[int(attr['id'])]
- assert spec.is_mutable()
- attr = _Attribute(spec, str(attr['value']))
- tracked_poly_shape.add_attribute(attr)
-
- poly_shapes.append(tracked_poly_shape)
- else:
- self.logger.error("init_from_client: ignore frame #%d " +
- "because it out of segment range [%d-%d]", int(poly_shape['frame']), self.start_frame, self.stop_frame)
-
- attributes = []
- for attr in path['attributes']:
- spec = self.db_attributes[int(attr['id'])]
- assert not spec.is_mutable()
- attr = _Attribute(spec, str(attr['value']))
- attributes.append(attr)
-
- poly_path = _PolyPath(
- label=label,
- start_frame=min(list(map(lambda shape: shape.frame, poly_shapes))),
- stop_frame=self.stop_frame + 1,
- group_id=int(path['group_id']),
- shapes=poly_shapes,
- client_id=int(path['id']),
- attributes=attributes,
- )
-
- getattr(self, poly_path_type).append(poly_path)
-
- return self.has_data()
-
- def _get_shape_class(self, shape_type):
- if shape_type == 'polygons':
- return models.LabeledPolygon
- elif shape_type == 'polylines':
- return models.LabeledPolyline
- elif shape_type == 'boxes':
- return models.LabeledBox
- elif shape_type == 'points':
- return models.LabeledPoints
- elif shape_type == 'polygon_paths':
- return models.TrackedPolygon
- elif shape_type == 'polyline_paths':
- return models.TrackedPolyline
- elif shape_type == 'box_paths':
- return models.TrackedBox
- elif shape_type == 'points_paths':
- return models.TrackedPoints
-
- def _get_shape_attr_class(self, shape_type):
- if shape_type == 'polygons':
- return models.LabeledPolygonAttributeVal
- elif shape_type == 'polylines':
- return models.LabeledPolylineAttributeVal
- elif shape_type == 'boxes':
- return models.LabeledBoxAttributeVal
- elif shape_type == 'points':
- return models.LabeledPointsAttributeVal
- elif shape_type == 'polygon_paths':
- return models.TrackedPolygonAttributeVal
- elif shape_type == 'polyline_paths':
- return models.TrackedPolylineAttributeVal
- elif shape_type == 'box_paths':
- return models.TrackedBoxAttributeVal
- elif shape_type == 'points_paths':
- return models.TrackedPointsAttributeVal
-
- def _save_paths_to_db(self):
- for shape_type in ['polygon_paths', 'polyline_paths', 'points_paths', 'box_paths']:
- db_paths = []
- db_path_attrvals = []
- db_shapes = []
- db_shape_attrvals = []
-
- shapes = getattr(self, shape_type)
- for path in shapes:
- db_path = models.ObjectPath()
- db_path.job = self.db_job
- db_path.label = self.db_labels[path.label.id]
- db_path.frame = path.frame
- db_path.group_id = path.group_id
- db_path.client_id = path.client_id
- if shape_type == 'polygon_paths':
- db_path.shapes = 'polygons'
- elif shape_type == 'polyline_paths':
- db_path.shapes = 'polylines'
- elif shape_type == 'box_paths':
- db_path.shapes = 'boxes'
- elif shape_type == 'points_paths':
- db_path.shapes = 'points'
-
- for attr in path.attributes:
- db_attrspec = self.db_attributes[attr.id]
- db_attrval = models.ObjectPathAttributeVal()
- db_attrval.track_id = len(db_paths)
- db_attrval.spec = db_attrspec
- db_attrval.value = attr.value
- db_path_attrvals.append(db_attrval)
-
- path_shapes = path.boxes if hasattr(path, 'boxes') else path.shapes
- for shape in path_shapes:
- db_shape = self._get_shape_class(shape_type)()
- db_shape.track_id = len(db_paths)
- if shape_type == 'box_paths':
- db_shape.xtl = shape.xtl
- db_shape.ytl = shape.ytl
- db_shape.xbr = shape.xbr
- db_shape.ybr = shape.ybr
- else:
- db_shape.points = shape.points
- db_shape.frame = shape.frame
- db_shape.occluded = shape.occluded
- db_shape.z_order = shape.z_order
- db_shape.outside = shape.outside
-
- for attr in shape.attributes:
- db_attrspec = self.db_attributes[attr.id]
- db_attrval = self._get_shape_attr_class(shape_type)()
- if shape_type == 'polygon_paths':
- db_attrval.polygon_id = len(db_shapes)
- elif shape_type == 'polyline_paths':
- db_attrval.polyline_id = len(db_shapes)
- elif shape_type == 'box_paths':
- db_attrval.box_id = len(db_shapes)
- elif shape_type == 'points_paths':
- db_attrval.points_id = len(db_shapes)
- db_attrval.spec = db_attrspec
- db_attrval.value = attr.value
- db_shape_attrvals.append(db_attrval)
-
- db_shapes.append(db_shape)
- db_paths.append(db_path)
-
- db_paths = bulk_create(models.ObjectPath, db_paths,
- {"job_id": self.db_job.id})
-
- for db_attrval in db_path_attrvals:
- db_attrval.track_id = db_paths[db_attrval.track_id].id
- bulk_create(models.ObjectPathAttributeVal, db_path_attrvals)
-
- for db_shape in db_shapes:
- db_shape.track_id = db_paths[db_shape.track_id].id
-
- db_shapes = bulk_create(self._get_shape_class(shape_type), db_shapes,
- {"track__job_id": self.db_job.id})
-
- for db_attrval in db_shape_attrvals:
- if shape_type == 'polygon_paths':
- db_attrval.polygon_id = db_shapes[db_attrval.polygon_id].id
- elif shape_type == 'polyline_paths':
- db_attrval.polyline_id = db_shapes[db_attrval.polyline_id].id
- elif shape_type == 'box_paths':
- db_attrval.box_id = db_shapes[db_attrval.box_id].id
- elif shape_type == 'points_paths':
- db_attrval.points_id = db_shapes[db_attrval.points_id].id
-
- bulk_create(self._get_shape_attr_class(shape_type), db_shape_attrvals)
-
- def _get_shape_set(self, shape_type):
- if shape_type == 'polygons':
- return self.db_job.labeledpolygon_set
- elif shape_type == 'polylines':
- return self.db_job.labeledpolyline_set
- elif shape_type == 'boxes':
- return self.db_job.labeledbox_set
- elif shape_type == 'points':
- return self.db_job.labeledpoints_set
-
- def _save_shapes_to_db(self):
- for shape_type in ['polygons', 'polylines', 'points', 'boxes']:
- db_shapes = []
- db_attrvals = []
-
- shapes = getattr(self, shape_type)
- for shape in shapes:
- db_shape = self._get_shape_class(shape_type)()
- db_shape.job = self.db_job
- db_shape.label = self.db_labels[shape.label.id]
- db_shape.group_id = shape.group_id
- db_shape.client_id = shape.client_id
- if shape_type == 'boxes':
- db_shape.xtl = shape.xtl
- db_shape.ytl = shape.ytl
- db_shape.xbr = shape.xbr
- db_shape.ybr = shape.ybr
- else:
- db_shape.points = shape.points
- db_shape.frame = shape.frame
- db_shape.occluded = shape.occluded
- db_shape.z_order = shape.z_order
-
- for attr in shape.attributes:
- db_attrval = self._get_shape_attr_class(shape_type)()
- if shape_type == 'polygons':
- db_attrval.polygon_id = len(db_shapes)
- elif shape_type == 'polylines':
- db_attrval.polyline_id = len(db_shapes)
- elif shape_type == 'boxes':
- db_attrval.box_id = len(db_shapes)
- else:
- db_attrval.points_id = len(db_shapes)
-
- db_attrval.spec = self.db_attributes[attr.id]
- db_attrval.value = attr.value
- db_attrvals.append(db_attrval)
-
- db_shapes.append(db_shape)
- db_shapes = bulk_create(self._get_shape_class(shape_type), db_shapes,
- {"job_id": self.db_job.id})
+ serializer = serializers.LabeledTrackSerializer(db_tracks, many=True)
+ self.data["tracks"] = serializer.data
- for db_attrval in db_attrvals:
- if shape_type == 'polygons':
- db_attrval.polygon_id = db_shapes[db_attrval.polygon_id].id
- elif shape_type == 'polylines':
- db_attrval.polyline_id = db_shapes[db_attrval.polyline_id].id
- elif shape_type == 'boxes':
- db_attrval.box_id = db_shapes[db_attrval.box_id].id
- else:
- db_attrval.points_id = db_shapes[db_attrval.points_id].id
-
- bulk_create(self._get_shape_attr_class(shape_type), db_attrvals)
-
- def _update_shapes_in_db(self):
- client_ids_to_delete = {}
- for shape_type in ['polygons', 'polylines', 'points', 'boxes']:
- client_ids_to_delete[shape_type] = list(shape.client_id for shape in getattr(self, shape_type))
- self._delete_shapes_from_db(client_ids_to_delete)
- self._save_shapes_to_db()
-
- def _update_paths_in_db(self):
- client_ids_to_delete = {}
- for shape_type in ['polygon_paths', 'polyline_paths', 'points_paths', 'box_paths']:
- client_ids_to_delete[shape_type] = list(shape.client_id for shape in getattr(self, shape_type))
- self._delete_paths_from_db(client_ids_to_delete)
- self._save_paths_to_db()
-
- def _delete_shapes_from_db(self, data):
- for shape_type in ['polygons', 'polylines', 'points', 'boxes']:
- client_ids_to_delete = data[shape_type]
- deleted = self._get_shape_set(shape_type).filter(client_id__in=client_ids_to_delete).delete()
- class_name = 'engine.{}'.format(self._get_shape_class(shape_type).__name__)
- if not (deleted[0] == 0 and len(client_ids_to_delete) == 0) and (class_name in deleted[1] and deleted[1][class_name] != len(client_ids_to_delete)):
- raise Exception('Number of deleted object doesn\'t match with requested number')
-
- def _delete_paths_from_db(self, data):
- client_ids_to_delete = []
- for shape_type in ['polygon_paths', 'polyline_paths', 'points_paths', 'box_paths']:
- client_ids_to_delete.extend(data[shape_type])
- deleted = self.db_job.objectpath_set.filter(client_id__in=client_ids_to_delete).delete()
- class_name = 'engine.ObjectPath'
- if not (deleted[0] == 0 and len(client_ids_to_delete) == 0) and \
- (class_name in deleted[1] and deleted[1][class_name] != len(client_ids_to_delete)):
- raise Exception('Number of deleted object doesn\'t match with requested number')
-
- def delete_all_shapes_from_db(self):
- for shape_type in ['polygons', 'polylines', 'points', 'boxes']:
- self._get_shape_set(shape_type).all().delete()
-
- def delete_all_paths_from_db(self):
- self.db_job.objectpath_set.all().delete()
-
- def delete_from_db(self, data):
- self._delete_shapes_from_db(data)
- self._delete_paths_from_db(data)
-
- def update_in_db(self, data):
- if self.init_from_client(data):
- self._update_shapes_in_db()
- self._update_paths_in_db()
-
- def save_to_db(self, data):
- if self.init_from_client(data):
- self._save_shapes_to_db()
- self._save_paths_to_db()
-
- def to_client(self):
- data = {
- "boxes": [],
- "box_paths": [],
- "polygons": [],
- "polygon_paths": [],
- "polylines": [],
- "polyline_paths": [],
- "points": [],
- "points_paths": [],
- }
-
- for box in self.boxes:
- data["boxes"].append({
- "id": box.client_id,
- "label_id": box.label.id,
- "group_id": box.group_id,
- "xtl": box.xtl,
- "ytl": box.ytl,
- "xbr": box.xbr,
- "ybr": box.ybr,
- "occluded": box.occluded,
- "z_order": box.z_order,
- "frame": box.frame,
- "attributes": [{'id': attr.id, 'value':attr.value} for attr in box.attributes],
- })
-
- for poly_type in ['polygons', 'polylines', 'points']:
- for poly in getattr(self, poly_type):
- data[poly_type].append({
- "id": poly.client_id,
- "label_id": poly.label.id,
- "group_id": poly.group_id,
- "points": poly.points,
- "occluded": poly.occluded,
- "z_order": poly.z_order,
- "frame": poly.frame,
- "attributes": [{'id': attr.id, 'value':attr.value} for attr in poly.attributes],
- })
-
- for box_path in self.box_paths:
- data["box_paths"].append({
- "id": box_path.client_id,
- "label_id": box_path.label.id,
- "group_id": box_path.group_id,
- "frame": box_path.frame,
- "attributes": [{'id': attr.id, 'value':attr.value} for attr in box_path.attributes],
- "shapes": [box for box in map(lambda box:
- ({
- "frame": box.frame,
- "xtl": box.xtl,
- "ytl": box.ytl,
- "xbr": box.xbr,
- "ybr": box.ybr,
- "occluded": box.occluded,
- "z_order": box.z_order,
- "outside": box.outside,
- "attributes": [{'id': attr.id, 'value':attr.value} for attr in box.attributes],
- }), box_path.boxes)
- ],
- })
-
- for poly_path_type in ['polygon_paths', 'polyline_paths', 'points_paths']:
- for poly_path in getattr(self, poly_path_type):
- data[poly_path_type].append({
- "id": poly_path.client_id,
- "label_id": poly_path.label.id,
- "group_id": poly_path.group_id,
- "frame": poly_path.frame,
- "attributes": [{'id': attr.id, 'value':attr.value} for attr in poly_path.attributes],
- "shapes": [shape for shape in map(lambda shape:
- ({
- "frame": shape.frame,
- "points": shape.points,
- "occluded": shape.occluded,
- "z_order": shape.z_order,
- "outside": shape.outside,
- "attributes": [{'id': attr.id, 'value':attr.value} for attr in shape.attributes],
- }), poly_path.shapes)
- ],
- })
-
- return data
-
- def validate_data_from_client(self, data):
- client_ids = {
- 'saved': self._get_client_ids_from_db(),
- 'create': set(),
- 'update': set(),
- 'delete': set(),
- }
-
- def extract_clinet_id(shape, action):
- if action != 'delete':
- if 'id' not in shape:
- raise Exception('No id field in received data')
- client_id = shape['id']
- else:
- # client send only shape.id, not shape object
- client_id = shape
- client_ids[action].add(client_id)
-
- shape_types = ['boxes', 'points', 'polygons', 'polylines', 'box_paths',
- 'points_paths', 'polygon_paths', 'polyline_paths']
-
- for action in ['create', 'update', 'delete']:
- for shape_type in shape_types:
- for shape in data[action][shape_type]:
- extract_clinet_id(shape, action)
-
- # In case of delete action potentially it is possible to intersect set of IDs
- # that should delete and set of IDs that should create(i.e. save uploaded anno).
- # There is no need to check that
- tmp_res = (client_ids['create'] & client_ids['update']) | (client_ids['update'] & client_ids['delete'])
- if tmp_res:
- raise Exception('More than one action for shape(s) with id={}'.format(tmp_res))
-
- tmp_res = (client_ids['saved'] - client_ids['delete']) & client_ids['create']
- if tmp_res:
- raise Exception('Trying to create new shape(s) with existing client id {}'.format(tmp_res))
-
- tmp_res = client_ids['delete'] - client_ids['saved']
- if tmp_res:
- raise Exception('Trying to delete shape(s) with nonexistent client id {}'.format(tmp_res))
-
- tmp_res = client_ids['update'] - (client_ids['saved'] - client_ids['delete'])
- if tmp_res:
- raise Exception('Trying to update shape(s) with nonexistent client id {}'.format(tmp_res))
-
- max_id = self.db_job.max_shape_id
- if any(new_client_id <= max_id for new_client_id in client_ids['create']):
- raise Exception('Trying to create shape(s) with client id {} less than allowed value {}'.format(client_ids['create'], max_id))
-
- return client_ids
-
- def force_set_client_id(self, data):
- shape_types = ['boxes', 'points', 'polygons', 'polylines', 'box_paths',
- 'points_paths', 'polygon_paths', 'polyline_paths']
-
- max_id = self.db_job.max_shape_id
- for shape_type in shape_types:
- if not data[shape_type]:
- continue
- for shape in data[shape_type]:
- if 'id' in shape:
- max_id = max(max_id, shape['id'])
-
- max_id += 1
- for shape_type in shape_types:
- for shape in data[shape_type]:
- if 'id' not in shape or shape['id'] == -1:
- shape['id'] = max_id
- max_id += 1
-
-class _AnnotationForSegment(_Annotation):
- def __init__(self, db_segment):
- super().__init__(db_segment.start_frame, db_segment.stop_frame)
- self.db_segment = db_segment
+ def _init_version_from_db(self):
+ db_commit = self.db_job.commits.last()
+ if db_commit:
+ self.data["version"] = db_commit.version
+ else:
+ self.data["version"] = 0
def init_from_db(self):
- # FIXME: at the moment a segment has only one job always. Thus
- # the implementation makes sense. Need to implement a good one
- # in the future.
- self.reset()
-
- db_job0 = list(self.db_segment.job_set.all())[0]
- annotation = _AnnotationForJob(db_job0)
- annotation.init_from_db()
- self.boxes = annotation.boxes
- self.box_paths = annotation.box_paths
- self.polygons = annotation.polygons
- self.polygon_paths = annotation.polygon_paths
- self.polylines = annotation.polylines
- self.polyline_paths = annotation.polyline_paths
- self.points = annotation.points
- self.points_paths = annotation.points_paths
+ self._init_tags_from_db()
+ self._init_shapes_from_db()
+ self._init_tracks_from_db()
+ self._init_version_from_db()
-@plugin_decorator
-def _dump(tid, data_format, scheme, host, plugin_meta_data):
- # For big tasks dump function may run for a long time and
- # we dont need to acquire lock after _AnnotationForTask instance
- # has been initialized from DB.
- # But there is the bug with corrupted dump file in case 2 or more dump request received at the same time.
- # https://github.com/opencv/cvat/issues/217
- with transaction.atomic():
- db_task = models.Task.objects.select_for_update().get(id=tid)
- annotation = _AnnotationForTask(db_task)
- annotation.init_from_db()
-
- annotation.dump(data_format, scheme, host, plugin_meta_data)
-
-def _calc_box_area(box):
- return (box.xbr - box.xtl) * (box.ybr - box.ytl)
-
-def _calc_overlap_box_area(box0, box1):
- dx = min(box0.xbr, box1.xbr) - max(box0.xtl, box1.xtl)
- dy = min(box0.ybr, box1.ybr) - max(box0.ytl, box1.ytl)
- if dx > 0 and dy > 0:
- return dx * dy
- else:
- return 0
-
-def _calc_box_IoU(box0, box1):
- overlap_area = _calc_overlap_box_area(box0, box1)
- return overlap_area / (_calc_box_area(box0) + _calc_box_area(box1) - overlap_area)
-
-class _AnnotationWriter:
+class AnnotationWriter:
__metaclass__ = ABCMeta
def __init__(self, file, version):
@@ -1604,7 +650,7 @@ def close_track(self):
def close_root(self):
raise NotImplementedError
-class _XmlAnnotationWriter(_AnnotationWriter):
+class XmlAnnotationWriter(AnnotationWriter):
def __init__(self, file):
super().__init__(file, "1.1")
self.xmlgen = XMLGenerator(self.file, 'utf-8')
@@ -1636,7 +682,7 @@ def _add_meta(self, meta):
self._add_meta(v)
self._indent()
self.xmlgen.endElement(k)
- elif type(v) == list:
+ elif isinstance(v, list):
self._indent()
self.xmlgen.startElement(k, {})
for tup in v:
@@ -1729,196 +775,467 @@ def close_root(self):
self.xmlgen.endElement("annotations")
self.xmlgen.endDocument()
-class _AnnotationForTask(_Annotation):
- def __init__(self, db_task):
- super().__init__(0, db_task.size)
- self.db_task = db_task
+class DataManager:
+ def __init__(self, data):
+ self.data = data
- def init_from_db(self):
- self.reset()
+ def merge(self, data, start_frame, overlap):
+ tags = TagManager(self.data["tags"])
+ tags.merge(data["tags"], start_frame, overlap)
- for db_segment in self.db_task.segment_set.all():
- annotation = _AnnotationForSegment(db_segment)
- annotation.init_from_db()
- self._merge_boxes(annotation.boxes, db_segment.start_frame,
- self.db_task.overlap)
- self._merge_paths(annotation.box_paths, db_segment.start_frame,
- self.db_task.overlap)
- self.polygons.extend(annotation.polygons)
- self.polylines.extend(annotation.polylines)
- self.points.extend(annotation.points)
- self.polygon_paths.extend(annotation.polygon_paths)
- self.polyline_paths.extend(annotation.polyline_paths)
- self.points_paths.extend(annotation.points_paths)
- # FIXME PolyShapes merge???
-
- def _merge_paths(self, paths, start_frame, overlap):
- # 1. Split paths on two parts: new and which can be intersected
- # with existing paths.
- new_paths = [path for path in paths
- if path.frame >= start_frame + overlap]
- int_paths = [path for path in paths
- if path.frame < start_frame + overlap]
- assert len(new_paths) + len(int_paths) == len(paths)
-
- # 4. Find old paths which are intersected with int_paths
- old_paths = []
- for path in self.box_paths:
- box = path.get_interpolated_boxes()[-1]
- if box.frame >= start_frame:
- old_paths.append(path)
-
- # 3. Add new paths as is. It should be done only after old_paths
- # variable is initialized.
- self.box_paths.extend(new_paths)
+ shapes = ShapeManager(self.data["shapes"])
+ shapes.merge(data["shapes"], start_frame, overlap)
- # Nothing to merge. Just add all int_paths if any.
- if not old_paths or not int_paths:
- self.box_paths.extend(int_paths)
- return
+ tracks = TrackManager(self.data["tracks"])
+ tracks.merge(data["tracks"], start_frame, overlap)
- # 4. Build cost matrix for each path and find correspondence using
- # Hungarian algorithm.
- min_cost_thresh = 0.5
- cost_matrix = np.empty(shape=(len(int_paths), len(old_paths)),
- dtype=float)
- for i, int_path in enumerate(int_paths):
- for j, old_path in enumerate(old_paths):
- cost_matrix[i][j] = 1
- if int_path.label.id == old_path.label.id:
- # Here start_frame is the start frame of next segment
- # and stop_frame is the stop frame of current segment
- stop_frame = start_frame + overlap - 1
- int_boxes = int_path.get_interpolated_boxes()
- old_boxes = old_path.get_interpolated_boxes()
- int_boxes = {box.frame:box for box in int_boxes if box.frame <= stop_frame}
- old_boxes = {box.frame:box for box in old_boxes if box.frame >= start_frame}
- assert int_boxes and old_boxes
-
- count, error = 0, 0
- for frame in range(start_frame, stop_frame + 1):
- box0, box1 = int_boxes.get(frame), old_boxes.get(frame)
- if box0 and box1:
- if box0.outside != box1.outside:
- error += 1
- else:
- error += 1 - _calc_box_IoU(box0, box1)
- count += 1
- elif box0 or box1:
- error += 1
- count += 1
-
- cost_matrix[i][j] = error / count
-
- # 6. Find optimal solution using Hungarian algorithm.
- row_ind, col_ind = linear_sum_assignment(cost_matrix)
- int_paths_indexes = list(range(0, len(int_paths)))
- for i, j in zip(row_ind, col_ind):
- # Reject the solution if the cost is too high. Remember
- # inside int_boxes_indexes boxes which were handled.
- if cost_matrix[i][j] <= min_cost_thresh:
- old_paths[j].merge(int_paths[i])
- int_paths_indexes[i] = -1
-
- # 7. Add all paths which were not processed.
- for i in int_paths_indexes:
- if i != -1:
- self.box_paths.append(int_paths[i])
-
- def _merge_boxes(self, boxes, start_frame, overlap):
- # 1. Split boxes on two parts: new and which can be intersected
- # with existing boxes.
- new_boxes = [box for box in boxes
- if box.frame >= start_frame + overlap]
- int_boxes = [box for box in boxes
- if box.frame < start_frame + overlap]
- assert len(new_boxes) + len(int_boxes) == len(boxes)
-
- # 2. Convert to more convenient data structure (boxes by frame)
- int_boxes_by_frame = {}
- for box in int_boxes:
- if box.frame in int_boxes_by_frame:
- int_boxes_by_frame[box.frame].append(box)
- else:
- int_boxes_by_frame[box.frame] = [box]
+ def to_shapes(self, end_frame):
+ shapes = self.data["shapes"]
+ tracks = TrackManager(self.data["tracks"])
+
+ return shapes + tracks.to_shapes(end_frame)
+
+ def to_tracks(self):
+ tracks = self.data["tracks"]
+ shapes = ShapeManager(self.data["shapes"])
+
+ return tracks + shapes.to_tracks()
- old_boxes_by_frame = {}
- for box in self.boxes:
- if box.frame >= start_frame:
- if box.frame in old_boxes_by_frame:
- old_boxes_by_frame[box.frame].append(box)
+class ObjectManager:
+ def __init__(self, objects):
+ self.objects = objects
+
+ @staticmethod
+ def _get_objects_by_frame(objects, start_frame):
+ objects_by_frame = {}
+ for obj in objects:
+ if obj["frame"] >= start_frame:
+ if obj["frame"] in objects_by_frame:
+ objects_by_frame[obj["frame"]].append(obj)
else:
- old_boxes_by_frame[box.frame] = [box]
+ objects_by_frame[obj["frame"]] = [obj]
- # 3. Add new boxes as is. It should be done only after old_boxes_by_frame
- # variable is initialized.
- self.boxes.extend(new_boxes)
+ return objects_by_frame
+
+ @staticmethod
+ def _get_cost_threshold():
+ raise NotImplementedError()
+
+ @staticmethod
+ def _calc_objects_similarity(obj0, obj1, start_frame, overlap):
+ raise NotImplementedError()
- # Nothing to merge here. Just add all int_boxes if any.
- if not old_boxes_by_frame or not int_boxes_by_frame:
- self.boxes.extend(int_boxes)
+ @staticmethod
+ def _unite_objects(obj0, obj1):
+ raise NotImplementedError()
+
+ @staticmethod
+ def _modify_unmached_object(obj, end_frame):
+ raise NotImplementedError()
+
+ def merge(self, objects, start_frame, overlap):
+ # 1. Split objects on two parts: new and which can be intersected
+ # with existing objects.
+ new_objects = [obj for obj in objects
+ if obj["frame"] >= start_frame + overlap]
+ int_objects = [obj for obj in objects
+ if obj["frame"] < start_frame + overlap]
+ assert len(new_objects) + len(int_objects) == len(objects)
+
+ # 2. Convert to more convenient data structure (objects by frame)
+ int_objects_by_frame = self._get_objects_by_frame(int_objects, start_frame)
+ old_objects_by_frame = self._get_objects_by_frame(self.objects, start_frame)
+
+ # 3. Add new objects as is. It should be done only after old_objects_by_frame
+ # variable is initialized.
+ self.objects.extend(new_objects)
+
+ # Nothing to merge here. Just add all int_objects if any.
+ if not old_objects_by_frame or not int_objects_by_frame:
+ for frame in old_objects_by_frame:
+ for old_obj in old_objects_by_frame[frame]:
+ self._modify_unmached_object(old_obj, start_frame + overlap)
+ self.objects.extend(int_objects)
return
# 4. Build cost matrix for each frame and find correspondence using
# Hungarian algorithm. In this case min_cost_thresh is stronger
# because we compare only on one frame.
- min_cost_thresh = 0.25
- for frame in int_boxes_by_frame:
- if frame in old_boxes_by_frame:
- int_boxes = int_boxes_by_frame[frame]
- old_boxes = old_boxes_by_frame[frame]
- cost_matrix = np.empty(shape=(len(int_boxes), len(old_boxes)),
+ min_cost_thresh = self._get_cost_threshold()
+ for frame in int_objects_by_frame:
+ if frame in old_objects_by_frame:
+ int_objects = int_objects_by_frame[frame]
+ old_objects = old_objects_by_frame[frame]
+ cost_matrix = np.empty(shape=(len(int_objects), len(old_objects)),
dtype=float)
# 5.1 Construct cost matrix for the frame.
- for i, box0 in enumerate(int_boxes):
- for j, box1 in enumerate(old_boxes):
- if box0.label.id == box1.label.id:
- cost_matrix[i][j] = 1 - _calc_box_IoU(box0, box1)
- else:
- cost_matrix[i][j] = 1
+ for i, int_obj in enumerate(int_objects):
+ for j, old_obj in enumerate(old_objects):
+ cost_matrix[i][j] = 1 - self._calc_objects_similarity(
+ int_obj, old_obj, start_frame, overlap)
# 6. Find optimal solution using Hungarian algorithm.
row_ind, col_ind = linear_sum_assignment(cost_matrix)
- int_boxes_indexes = list(range(0, len(int_boxes)))
+ old_objects_indexes = list(range(0, len(old_objects)))
+ int_objects_indexes = list(range(0, len(int_objects)))
for i, j in zip(row_ind, col_ind):
# Reject the solution if the cost is too high. Remember
- # inside int_boxes_indexes boxes which were handled.
+ # inside int_objects_indexes objects which were handled.
if cost_matrix[i][j] <= min_cost_thresh:
- old_boxes[j].merge(int_boxes[i])
- int_boxes_indexes[i] = -1
+ old_objects[j] = self._unite_objects(int_objects[i], old_objects[j])
+ int_objects_indexes[i] = -1
+ old_objects_indexes[j] = -1
- # 7. Add all boxes which were not processed.
- for i in int_boxes_indexes:
+ # 7. Add all new objects which were not processed.
+ for i in int_objects_indexes:
if i != -1:
- self.boxes.append(int_boxes[i])
+ self.objects.append(int_objects[i])
+
+ # 8. Modify all old objects which were not processed
+ # (e.g. generate a shape with outside=True at the end).
+ for j in old_objects_indexes:
+ if j != -1:
+ self._modify_unmached_object(old_objects[j],
+ start_frame + overlap)
+ else:
+ # We don't have old objects on the frame. Let's add all new ones.
+ self.objects.extend(int_objects_by_frame[frame])
+
+class TagManager(ObjectManager):
+ @staticmethod
+ def _get_cost_threshold():
+ return 0.25
+
+ @staticmethod
+ def _calc_objects_similarity(obj0, obj1, start_frame, overlap):
+ # TODO: improve the trivial implementation, compare attributes
+ return 1 if obj0["label_id"] == obj1["label_id"] else 0
+
+ @staticmethod
+ def _unite_objects(obj0, obj1):
+ # TODO: improve the trivial implementation
+ return obj0 if obj0["frame"] < obj1["frame"] else obj1
+
+ @staticmethod
+ def _modify_unmached_object(obj, end_frame):
+ pass
+
+def pairwise(iterable):
+ a = iter(iterable)
+ return zip(a, a)
+
+class ShapeManager(ObjectManager):
+ def to_tracks(self):
+ tracks = []
+ for shape in self.objects:
+ shape0 = copy.copy(shape)
+ shape0["keyframe"] = True
+ shape0["outside"] = False
+ # TODO: Separate attributes on mutable and unmutable
+ shape0["attributes"] = []
+ shape0.pop("group", None)
+ shape1 = copy.copy(shape0)
+ shape1["outside"] = True
+ shape1["frame"] += 1
+
+ track = {
+ "label_id": shape["label_id"],
+ "frame": shape["frame"],
+ "group": shape.get("group", None),
+ "attributes": shape["attributes"],
+ "shapes": [shape0, shape1]
+ }
+ tracks.append(track)
+
+ return tracks
+
+ @staticmethod
+ def _get_cost_threshold():
+ return 0.25
+
+ @staticmethod
+ def _calc_objects_similarity(obj0, obj1, start_frame, overlap):
+ def _calc_polygons_similarity(p0, p1):
+ overlap_area = p0.intersection(p1).area
+ return overlap_area / (p0.area + p1.area - overlap_area)
+
+ has_same_type = obj0["type"] == obj1["type"]
+ has_same_label = obj0.get("label_id") == obj1.get("label_id")
+ if has_same_type and has_same_label:
+ if obj0["type"] == models.ShapeType.RECTANGLE:
+ p0 = geometry.box(*obj0["points"])
+ p1 = geometry.box(*obj1["points"])
+
+ return _calc_polygons_similarity(p0, p1)
+ elif obj0["type"] == models.ShapeType.POLYGON:
+ p0 = geometry.Polygon(pairwise(obj0["points"]))
+                p1 = geometry.Polygon(pairwise(obj1["points"]))
+
+ return _calc_polygons_similarity(p0, p1)
+ else:
+ return 0 # FIXME: need some similarity for points and polylines
+ return 0
+
+ @staticmethod
+ def _unite_objects(obj0, obj1):
+ # TODO: improve the trivial implementation
+ return obj0 if obj0["frame"] < obj1["frame"] else obj1
+
+ @staticmethod
+ def _modify_unmached_object(obj, end_frame):
+ pass
+
+class TrackManager(ObjectManager):
+ def to_shapes(self, end_frame):
+ shapes = []
+ for track in self.objects:
+ for shape in TrackManager.get_interpolated_shapes(track, 0, end_frame):
+ if not shape["outside"]:
+ shape.pop("outside")
+ shape.pop("keyframe", None)
+ shape["label_id"] = track["label_id"]
+ shape["group"] = track["group"]
+ shape["attributes"] += track["attributes"]
+
+ shapes.append(shape)
+
+ return shapes
+
+ @staticmethod
+ def _get_objects_by_frame(objects, start_frame):
+ # Just for unification. All tracks are assigned on the same frame
+ objects_by_frame = {0: []}
+ for obj in objects:
+ shape = obj["shapes"][-1] # optimization for old tracks
+ if shape["frame"] >= start_frame or not shape["outside"]:
+ objects_by_frame[0].append(obj)
+
+ if not objects_by_frame[0]:
+ objects_by_frame = {}
+
+ return objects_by_frame
+
+ @staticmethod
+ def _get_cost_threshold():
+ return 0.5
+
+ @staticmethod
+ def _calc_objects_similarity(obj0, obj1, start_frame, overlap):
+ if obj0["label_id"] == obj1["label_id"]:
+ # Here start_frame is the start frame of next segment
+ # and stop_frame is the stop frame of current segment
+ # end_frame == stop_frame + 1
+ end_frame = start_frame + overlap
+ obj0_shapes = TrackManager.get_interpolated_shapes(obj0, start_frame, end_frame)
+ obj1_shapes = TrackManager.get_interpolated_shapes(obj1, start_frame, end_frame)
+ obj0_shapes_by_frame = {shape["frame"]:shape for shape in obj0_shapes}
+ obj1_shapes_by_frame = {shape["frame"]:shape for shape in obj1_shapes}
+ assert obj0_shapes_by_frame and obj1_shapes_by_frame
+
+ count, error = 0, 0
+ for frame in range(start_frame, end_frame):
+ shape0 = obj0_shapes_by_frame.get(frame)
+ shape1 = obj1_shapes_by_frame.get(frame)
+ if shape0 and shape1:
+ if shape0["outside"] != shape1["outside"]:
+ error += 1
+ else:
+ error += 1 - ShapeManager._calc_objects_similarity(shape0, shape1, start_frame, overlap)
+ count += 1
+ elif shape0 or shape1:
+ error += 1
+ count += 1
+
+ return 1 - error / count
+ else:
+ return 0
+
+ @staticmethod
+ def _modify_unmached_object(obj, end_frame):
+ shape = obj["shapes"][-1]
+ if not shape["outside"]:
+ shape = copy.deepcopy(shape)
+ shape["frame"] = end_frame
+ shape["outside"] = True
+ obj["shapes"].append(shape)
+
+ @staticmethod
+ def normalize_shape(shape):
+ points = np.asarray(shape["points"]).reshape(-1, 2)
+ broken_line = geometry.LineString(points)
+ points = []
+ for off in range(0, 100, 1):
+ p = broken_line.interpolate(off / 100, True)
+ points.append(p.x)
+ points.append(p.y)
+
+ shape = copy.copy(shape)
+ shape["points"] = points
+
+ return shape
+
+ @staticmethod
+ def get_interpolated_shapes(track, start_frame, end_frame):
+ def interpolate(shape0, shape1):
+ shapes = []
+ is_same_type = shape0["type"] == shape1["type"]
+ is_polygon = shape0["type"] == models.ShapeType.POLYGON
+ is_polyline = shape0["type"] == models.ShapeType.POLYLINE
+ is_same_size = len(shape0["points"]) == len(shape1["points"])
+ if not is_same_type or is_polygon or is_polyline or not is_same_size:
+ shape0 = TrackManager.normalize_shape(shape0)
+ shape1 = TrackManager.normalize_shape(shape1)
+
+ distance = shape1["frame"] - shape0["frame"]
+ step = np.subtract(shape1["points"], shape0["points"]) / distance
+ for frame in range(shape0["frame"] + 1, shape1["frame"]):
+ off = frame - shape0["frame"]
+ points = shape0["points"] + step * off
+ shape = copy.deepcopy(shape0)
+ broken_line = geometry.LineString(points.reshape(-1, 2)).simplify(0.05, False)
+ shape["keyframe"] = False
+ shape["frame"] = frame
+ shape["points"] = [x for p in broken_line.coords for x in p]
+ shapes.append(shape)
+ return shapes
+
+ if track.get("interpolated_shapes"):
+ return track["interpolated_shapes"]
+
+ # TODO: should be return an iterator?
+ shapes = []
+ curr_frame = track["shapes"][0]["frame"]
+ prev_shape = {}
+ for shape in track["shapes"]:
+ if prev_shape:
+ assert shape["frame"] > curr_frame
+ for attr in prev_shape["attributes"]:
+ if attr["spec_id"] not in map(lambda el: el["spec_id"], shape["attributes"]):
+ shape["attributes"].append(copy.deepcopy(attr))
+ if not prev_shape["outside"]:
+ shapes.extend(interpolate(prev_shape, shape))
+
+ shape["keyframe"] = True
+ shapes.append(shape)
+ curr_frame = shape["frame"]
+ prev_shape = shape
+
+ # TODO: Need to modify a client and a database (append "outside" shapes for polytracks)
+ if not prev_shape["outside"] and prev_shape["type"] == models.ShapeType.RECTANGLE:
+ shape = copy.copy(prev_shape)
+ shape["frame"] = end_frame
+ shapes.extend(interpolate(prev_shape, shape))
+
+ track["interpolated_shapes"] = shapes
+
+ return shapes
+
+ @staticmethod
+ def _unite_objects(obj0, obj1):
+ track = obj0 if obj0["frame"] < obj1["frame"] else obj1
+ assert obj0["label_id"] == obj1["label_id"]
+ shapes = {shape["frame"]:shape for shape in obj0["shapes"]}
+ for shape in obj1["shapes"]:
+ frame = shape["frame"]
+ if frame in shapes:
+ shapes[frame] = ShapeManager._unite_objects(shapes[frame], shape)
+ else:
+ shapes[frame] = shape
+
+ track["frame"] = min(obj0["frame"], obj1["frame"])
+ track["shapes"] = list(sorted(shapes.values(), key=lambda shape: shape["frame"]))
+ track["interpolated_shapes"] = []
+
+ return track
+
+class TaskAnnotation:
+ def __init__(self, pk, user):
+ self.user = user
+ self.db_task = models.Task.objects.prefetch_related("image_set").get(id=pk)
+ self.db_jobs = models.Job.objects.select_related("segment").filter(segment__task_id=pk)
+ self.reset()
+
+ def reset(self):
+ self.data = {
+ "version": 0,
+ "tags": [],
+ "shapes": [],
+ "tracks": []
+ }
+
+ def _patch_data(self, data, action):
+ splitted_data = {}
+ jobs = {}
+ for db_job in self.db_jobs:
+ jid = db_job.id
+ start = db_job.segment.start_frame
+ stop = db_job.segment.stop_frame
+ jobs[jid] = { "start": start, "stop": stop }
+ is_frame_inside = lambda x: (start <= int(x['frame']) <= stop)
+ splitted_data[jid] = {
+ "tags": list(filter(is_frame_inside, data['tags'])),
+ "shapes": list(filter(is_frame_inside, data['shapes'])),
+ "tracks": list(filter(lambda y: len(list(filter(is_frame_inside, y['shapes']))), data['tracks']))
+ }
+
+ for jid, job_data in splitted_data.items():
+ if action is None:
+ _data = put_job_data(jid, self.user, job_data)
else:
- # We don't have old boxes on the frame. Let's add all new ones.
- self.boxes.extend(int_boxes_by_frame[frame])
+ _data = patch_job_data(jid, self.user, job_data, action)
+ if _data["version"] > self.data["version"]:
+ self.data["version"] = _data["version"]
+ self._merge_data(_data, jobs[jid]["start"], self.db_task.overlap)
+
+ def _merge_data(self, data, start_frame, overlap):
+ data_manager = DataManager(self.data)
+ data_manager.merge(data, start_frame, overlap)
+
+ def put(self, data):
+ self._patch_data(data, None)
- def dump(self, data_format, scheme, host, plugin_meta_data):
- def _flip_box(box, im_w, im_h):
- box.xbr, box.xtl = im_w - box.xtl, im_w - box.xbr
- box.ybr, box.ytl = im_h - box.ytl, im_h - box.ybr
+ def create(self, data):
+ self._patch_data(data, PatchAction.CREATE)
- def _flip_shape(shape, im_w, im_h):
- points = []
- for p in shape.points.split(' '):
- p = p.split(',')
- points.append({
- 'x': p[0],
- 'y': p[1]
- })
+ def update(self, data):
+ self._patch_data(data, PatchAction.UPDATE)
- for p in points:
- p['x'] = im_w - (float(p['x']) + 1)
- p['y'] = im_h - (float(p['y']) + 1)
+ def delete(self, data=None):
+ if data:
+ self._patch_data(data, PatchAction.DELETE)
+ else:
+ for db_job in self.db_jobs:
+ delete_job_data(db_job.id, self.user)
+
+ def init_from_db(self):
+ self.reset()
- shape.points = ' '.join(['{},{}'.format(point['x'], point['y']) for point in points])
+ for db_job in self.db_jobs:
+ annotation = JobAnnotation(db_job.id, self.user)
+ annotation.init_from_db()
+ if annotation.data["version"] > self.data["version"]:
+ self.data["version"] = annotation.data["version"]
+ db_segment = db_job.segment
+ start_frame = db_segment.start_frame
+ overlap = self.db_task.overlap
+ self._merge_data(annotation.data, start_frame, overlap)
+
+ @staticmethod
+ def _flip_shape(shape, im_w, im_h):
+ for x in range(0, len(shape["points"]), 2):
+ y = x + 1
+ shape["points"][x] = im_w - shape["points"][x]
+            shape["points"][y] = im_h - shape["points"][y]
+ def dump(self, file_path, scheme, host, query_params):
db_task = self.db_task
db_segments = db_task.segment_set.all().prefetch_related('job_set')
db_labels = db_task.label_set.all().prefetch_related('attributespec_set')
- im_meta_data = get_image_meta_cache(db_task)
+ db_label_by_id = {db_label.id:db_label for db_label in db_labels}
+ db_attribute_by_id = {db_attribute.id:db_attribute
+ for db_label in db_labels
+ for db_attribute in db_label.attributespec_set.all()}
+ im_meta_data = get_image_meta_cache(db_task)['original_size']
meta = OrderedDict([
("task", OrderedDict([
@@ -1931,12 +1248,17 @@ def _flip_shape(shape, im_w, im_h):
("flipped", str(db_task.flipped)),
("created", str(timezone.localtime(db_task.created_date))),
("updated", str(timezone.localtime(db_task.updated_date))),
- ("source", db_task.source),
("labels", [
("label", OrderedDict([
("name", db_label.name),
- ("attributes", [("attribute", db_attr.text)
+ ("attributes", [
+ ("attribute", OrderedDict([
+ ("name", db_attr.name),
+ ("mutable", str(db_attr.mutable)),
+ ("input_type", db_attr.input_type),
+ ("default_value", db_attr.default_value),
+ ("values", db_attr.values)]))
for db_attr in db_label.attributespec_set.all()])
])) for db_label in db_labels
]),
@@ -1947,8 +1269,8 @@ def _flip_shape(shape, im_w, im_h):
("start", str(db_segment.start_frame)),
("stop", str(db_segment.stop_frame)),
("url", "{0}://{1}/?id={2}".format(
- scheme, host, db_segment.job_set.all()[0].id))
- ])) for db_segment in db_segments
+ scheme, host, db_segment.job_set.all()[0].id))]
+ )) for db_segment in db_segments
]),
("owner", OrderedDict([
@@ -1959,221 +1281,181 @@ def _flip_shape(shape, im_w, im_h):
("dumped", str(timezone.localtime(timezone.now())))
])
- meta.update(plugin_meta_data)
-
if db_task.mode == "interpolation":
meta["task"]["original_size"] = OrderedDict([
- ("width", str(im_meta_data["original_size"][0]["width"])),
- ("height", str(im_meta_data["original_size"][0]["height"]))
+ ("width", str(im_meta_data[0]["width"])),
+ ("height", str(im_meta_data[0]["height"]))
])
- dump_path = db_task.get_dump_path()
- with open(dump_path, "w") as dump_file:
- dumper = _XmlAnnotationWriter(dump_file)
+ with open(file_path, "w") as dump_file:
+ dumper = XmlAnnotationWriter(dump_file)
dumper.open_root()
dumper.add_meta(meta)
if db_task.mode == "annotation":
+ db_image_by_frame = {db_image.frame:db_image
+ for db_image in db_task.image_set.all()}
shapes = {}
- shapes["boxes"] = {}
- shapes["polygons"] = {}
- shapes["polylines"] = {}
- shapes["points"] = {}
- boxes = self.to_boxes()
- for box in boxes:
- if box.frame not in shapes["boxes"]:
- shapes["boxes"][box.frame] = []
- shapes["boxes"][box.frame].append(box)
-
- polygons = self.to_polygons()
- for polygon in polygons:
- if polygon.frame not in shapes["polygons"]:
- shapes["polygons"][polygon.frame] = []
- shapes["polygons"][polygon.frame].append(polygon)
-
- polylines = self.to_polylines()
- for polyline in polylines:
- if polyline.frame not in shapes["polylines"]:
- shapes["polylines"][polyline.frame] = []
- shapes["polylines"][polyline.frame].append(polyline)
-
- points = self.to_points()
- for points in points:
- if points.frame not in shapes["points"]:
- shapes["points"][points.frame] = []
- shapes["points"][points.frame].append(points)
-
- for frame in sorted(set(list(shapes["boxes"].keys()) +
- list(shapes["polygons"].keys()) +
- list(shapes["polylines"].keys()) +
- list(shapes["points"].keys()))):
-
- link = get_frame_path(db_task.id, frame)
- path = os.readlink(link)
-
- rpath = path.split(os.path.sep)
+ data_manager = DataManager(self.data)
+ for shape in data_manager.to_shapes(db_task.size):
+ frame = shape["frame"]
+ if frame not in shapes:
+ shapes[frame] = []
+ shapes[frame].append(shape)
+
+ for frame in sorted(list(shapes.keys())):
+ db_image = db_image_by_frame[frame]
+
+ rpath = db_image.path.split(os.path.sep)
rpath = os.path.sep.join(rpath[rpath.index(".upload")+1:])
- im_w = im_meta_data['original_size'][frame]['width']
- im_h = im_meta_data['original_size'][frame]['height']
+ im_w = db_image.width
+ im_h = db_image.height
dumper.open_image(OrderedDict([
("id", str(frame)),
("name", rpath),
- ("width", str(im_meta_data['original_size'][frame]["width"])),
- ("height", str(im_meta_data['original_size'][frame]["height"]))
+ ("width", str(im_w)),
+ ("height", str(im_h))
]))
- for shape_type in ["boxes", "polygons", "polylines", "points"]:
- shape_dict = shapes[shape_type]
- if frame in shape_dict:
- for shape in shape_dict[frame]:
- if shape_type == "boxes":
- if db_task.flipped:
- _flip_box(shape, im_w, im_h)
-
- dump_dict = OrderedDict([
- ("label", shape.label.name),
- ("xtl", "{:.2f}".format(shape.xtl)),
- ("ytl", "{:.2f}".format(shape.ytl)),
- ("xbr", "{:.2f}".format(shape.xbr)),
- ("ybr", "{:.2f}".format(shape.ybr)),
- ("occluded", str(int(shape.occluded))),
- ])
- if db_task.z_order:
- dump_dict['z_order'] = str(shape.z_order)
- if shape.group_id:
- dump_dict['group_id'] = str(shape.group_id)
- dumper.open_box(dump_dict)
- else:
- if db_task.flipped:
- _flip_shape(shape, im_w, im_h)
-
- dump_dict = OrderedDict([
- ("label", shape.label.name),
- ("points", ';'.join((
- ','.join((
- "{:.2f}".format(float(p.split(',')[0])),
- "{:.2f}".format(float(p.split(',')[1]))
- )) for p in shape.points.split(' '))
- )),
- ("occluded", str(int(shape.occluded))),
- ])
-
- if db_task.z_order:
- dump_dict['z_order'] = str(shape.z_order)
- if shape.group_id:
- dump_dict['group_id'] = str(shape.group_id)
-
- if shape_type == "polygons":
- dumper.open_polygon(dump_dict)
- elif shape_type == "polylines":
- dumper.open_polyline(dump_dict)
- else:
- dumper.open_points(dump_dict)
-
- for attr in shape.attributes:
- dumper.add_attribute(OrderedDict([
- ("name", attr.name),
- ("value", attr.value)
- ]))
-
- if shape_type == "boxes":
- dumper.close_box()
- elif shape_type == "polygons":
- dumper.close_polygon()
- elif shape_type == "polylines":
- dumper.close_polyline()
- else:
- dumper.close_points()
+ for shape in shapes.get(frame, []):
+ if db_task.flipped:
+ self._flip_shape(shape, im_w, im_h)
+
+ db_label = db_label_by_id[shape["label_id"]]
+
+ dump_data = OrderedDict([
+ ("label", db_label.name),
+ ("occluded", str(int(shape["occluded"]))),
+ ])
+
+ if shape["type"] == models.ShapeType.RECTANGLE:
+ dump_data.update(OrderedDict([
+ ("xtl", "{:.2f}".format(shape["points"][0])),
+ ("ytl", "{:.2f}".format(shape["points"][1])),
+ ("xbr", "{:.2f}".format(shape["points"][2])),
+ ("ybr", "{:.2f}".format(shape["points"][3]))
+ ]))
+ else:
+ dump_data.update(OrderedDict([
+ ("points", ';'.join((
+ ','.join((
+ "{:.2f}".format(x),
+ "{:.2f}".format(y)
+ )) for x,y in pairwise(shape["points"]))
+ )),
+ ]))
+
+ if db_task.z_order:
+ dump_data['z_order'] = str(shape["z_order"])
+ if shape["group"]:
+ dump_data['group_id'] = str(shape["group"])
+
+ if shape["type"] == models.ShapeType.RECTANGLE:
+ dumper.open_box(dump_data)
+ elif shape["type"] == models.ShapeType.POLYGON:
+ dumper.open_polygon(dump_data)
+ elif shape["type"] == models.ShapeType.POLYLINE:
+ dumper.open_polyline(dump_data)
+ elif shape["type"] == models.ShapeType.POINTS:
+ dumper.open_points(dump_data)
+ else:
+ raise NotImplementedError("unknown shape type")
+
+ for attr in shape["attributes"]:
+ db_attribute = db_attribute_by_id[attr["spec_id"]]
+ dumper.add_attribute(OrderedDict([
+ ("name", db_attribute.name),
+ ("value", attr["value"])
+ ]))
+
+ if shape["type"] == models.ShapeType.RECTANGLE:
+ dumper.close_box()
+ elif shape["type"] == models.ShapeType.POLYGON:
+ dumper.close_polygon()
+ elif shape["type"] == models.ShapeType.POLYLINE:
+ dumper.close_polyline()
+ elif shape["type"] == models.ShapeType.POINTS:
+ dumper.close_points()
+ else:
+ raise NotImplementedError("unknown shape type")
dumper.close_image()
else:
- paths = {}
- paths["boxes"] = self.to_box_paths()
- paths["polygons"] = self.to_polygon_paths()
- paths["polylines"] = self.to_polyline_paths()
- paths["points"] = self.to_points_paths()
+ data_manager = DataManager(self.data)
+ tracks = data_manager.to_tracks()
- im_w = im_meta_data['original_size'][0]['width']
- im_h = im_meta_data['original_size'][0]['height']
+ im_w = im_meta_data[0]['width']
+ im_h = im_meta_data[0]['height']
counter = 0
- for shape_type in ["boxes", "polygons", "polylines", "points"]:
- path_list = paths[shape_type]
- for path in path_list:
- path_id = path.client_id if path.client_id != -1 else counter
- counter += 1
- dump_dict = OrderedDict([
- ("id", str(path_id)),
- ("label", path.label.name),
+ for track in tracks:
+ track_id = counter
+ counter += 1
+ db_label = db_label_by_id[track["label_id"]]
+ dump_data = OrderedDict([
+ ("id", str(track_id)),
+ ("label", db_label.name),
+ ])
+ if track["group"]:
+ dump_data['group_id'] = str(track["group"])
+ dumper.open_track(dump_data)
+ for shape in TrackManager.get_interpolated_shapes(
+ track, 0, db_task.size):
+ if db_task.flipped:
+ self._flip_shape(shape, im_w, im_h)
+
+ dump_data = OrderedDict([
+ ("frame", str(shape["frame"])),
+ ("outside", str(int(shape["outside"]))),
+ ("occluded", str(int(shape["occluded"]))),
+ ("keyframe", str(int(shape["keyframe"])))
])
- if path.group_id:
- dump_dict['group_id'] = str(path.group_id)
- dumper.open_track(dump_dict)
- if shape_type == "boxes":
- for box in path.get_interpolated_boxes():
- if db_task.flipped:
- _flip_box(box, im_w, im_h)
- dump_dict = OrderedDict([
- ("frame", str(box.frame)),
- ("xtl", "{:.2f}".format(box.xtl)),
- ("ytl", "{:.2f}".format(box.ytl)),
- ("xbr", "{:.2f}".format(box.xbr)),
- ("ybr", "{:.2f}".format(box.ybr)),
- ("outside", str(int(box.outside))),
- ("occluded", str(int(box.occluded))),
- ("keyframe", str(int(box.keyframe)))
- ])
-
- if db_task.z_order:
- dump_dict["z_order"] = str(box.z_order)
-
- dumper.open_box(dump_dict)
- for attr in path.attributes + box.attributes:
- dumper.add_attribute(OrderedDict([
- ("name", attr.name),
- ("value", attr.value)
- ]))
- dumper.close_box()
+
+ if shape["type"] == models.ShapeType.RECTANGLE:
+ dump_data.update(OrderedDict([
+ ("xtl", "{:.2f}".format(shape["points"][0])),
+ ("ytl", "{:.2f}".format(shape["points"][1])),
+ ("xbr", "{:.2f}".format(shape["points"][2])),
+ ("ybr", "{:.2f}".format(shape["points"][3])),
+ ]))
+ else:
+ dump_data.update(OrderedDict([
+ ("points", ';'.join(['{:.2f},{:.2f}'.format(x, y)
+ for x,y in pairwise(shape["points"])]))
+ ]))
+
+ if db_task.z_order:
+ dump_data["z_order"] = str(shape["z_order"])
+
+ if shape["type"] == models.ShapeType.RECTANGLE:
+ dumper.open_box(dump_data)
+ elif shape["type"] == models.ShapeType.POLYGON:
+ dumper.open_polygon(dump_data)
+ elif shape["type"] == models.ShapeType.POLYLINE:
+ dumper.open_polyline(dump_data)
+ elif shape["type"] == models.ShapeType.POINTS:
+ dumper.open_points(dump_data)
+ else:
+ raise NotImplementedError("unknown shape type")
+
+ for attr in shape.get("attributes", []) + track.get("attributes", []):
+ db_attribute = db_attribute_by_id[attr["spec_id"]]
+ dumper.add_attribute(OrderedDict([
+ ("name", db_attribute.name),
+ ("value", attr["value"])
+ ]))
+
+ if shape["type"] == models.ShapeType.RECTANGLE:
+ dumper.close_box()
+ elif shape["type"] == models.ShapeType.POLYGON:
+ dumper.close_polygon()
+ elif shape["type"] == models.ShapeType.POLYLINE:
+ dumper.close_polyline()
+ elif shape["type"] == models.ShapeType.POINTS:
+ dumper.close_points()
else:
- for shape in path.get_interpolated_shapes():
- if db_task.flipped:
- _flip_shape(shape, im_w, im_h)
- dump_dict = OrderedDict([
- ("frame", str(shape.frame)),
- ("points", ';'.join((
- ','.join((
- "{:.2f}".format(float(p.split(',')[0])),
- "{:.2f}".format(float(p.split(',')[1]))
- )) for p in shape.points.split(' '))
- )),
- ("outside", str(int(shape.outside))),
- ("occluded", str(int(shape.occluded))),
- ("keyframe", str(int(shape.keyframe)))
- ])
-
- if db_task.z_order:
- dump_dict["z_order"] = str(shape.z_order)
-
- if shape_type == "polygons":
- dumper.open_polygon(dump_dict)
- elif shape_type == "polylines":
- dumper.open_polyline(dump_dict)
- else:
- dumper.open_points(dump_dict)
-
- for attr in path.attributes + shape.attributes:
- dumper.add_attribute(OrderedDict([
- ("name", attr.name),
- ("value", attr.value)
- ]))
-
- if shape_type == "polygons":
- dumper.close_polygon()
- elif shape_type == "polylines":
- dumper.close_polyline()
- else:
- dumper.close_points()
- dumper.close_track()
+ raise NotImplementedError("unknown shape type")
+ dumper.close_track()
dumper.close_root()
diff --git a/cvat/apps/engine/apps.py b/cvat/apps/engine/apps.py
index 3570035e52b1..684345bc37dc 100644
--- a/cvat/apps/engine/apps.py
+++ b/cvat/apps/engine/apps.py
@@ -5,7 +5,12 @@
from django.apps import AppConfig
-
class EngineConfig(AppConfig):
- name = 'engine'
+ name = 'cvat.apps.engine'
+
+ def ready(self):
+ from django.db.models.signals import post_save
+ from .signals import update_task_status
+ post_save.connect(update_task_status, sender='engine.Job',
+ dispatch_uid="update_task_status")
diff --git a/cvat/apps/engine/log.py b/cvat/apps/engine/log.py
index 4b065bfc6c36..d54e161a73cd 100644
--- a/cvat/apps/engine/log.py
+++ b/cvat/apps/engine/log.py
@@ -90,7 +90,8 @@ class dotdict(dict):
clogger = dotdict({
'task': TaskClientLoggerStorage(),
- 'job': JobClientLoggerStorage()
+ 'job': JobClientLoggerStorage(),
+ 'glob': logging.getLogger('cvat.client'),
})
slogger = dotdict({
diff --git a/cvat/apps/engine/migrations/0015_db_redesign_20190217.py b/cvat/apps/engine/migrations/0015_db_redesign_20190217.py
new file mode 100644
index 000000000000..d20e4bbb8b9f
--- /dev/null
+++ b/cvat/apps/engine/migrations/0015_db_redesign_20190217.py
@@ -0,0 +1,212 @@
+# Generated by Django 2.1.5 on 2019-02-17 19:32
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.migrations.operations.special
+import django.db.models.deletion
+import cvat.apps.engine.models
+
+def set_segment_size(apps, schema_editor):
+ Task = apps.get_model('engine', 'Task')
+ for task in Task.objects.all():
+ segment = task.segment_set.first()
+ if segment:
+ task.segment_size = segment.stop_frame - segment.start_frame + 1
+ task.save()
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+ ('engine', '0014_job_max_shape_id'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='task',
+ name='segment_size',
+ field=models.PositiveIntegerField(null=True),
+ ),
+ migrations.RunPython(
+ code=set_segment_size,
+ reverse_code=django.db.migrations.operations.special.RunPython.noop,
+ ),
+ migrations.AlterField(
+ model_name='task',
+ name='segment_size',
+ field=models.PositiveIntegerField(),
+ ),
+ migrations.CreateModel(
+ name='ClientFile',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('file', models.FileField(max_length=1024, storage=cvat.apps.engine.models.MyFileSystemStorage(),
+ upload_to=cvat.apps.engine.models.upload_path_handler)),
+ ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Task')),
+ ],
+ options={
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='RemoteFile',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('file', models.CharField(max_length=1024)),
+ ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Task')),
+ ],
+ options={
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='ServerFile',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('file', models.CharField(max_length=1024)),
+ ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Task')),
+ ],
+ options={
+ 'default_permissions': (),
+ },
+ ),
+ migrations.AlterField(
+ model_name='task',
+ name='status',
+ field=models.CharField(choices=[('ANNOTATION', 'annotation'), ('VALIDATION', 'validation'), ('COMPLETED', 'completed')], default=cvat.apps.engine.models.StatusChoice('annotation'), max_length=32),
+ ),
+ migrations.AlterField(
+ model_name='task',
+ name='overlap',
+ field=models.PositiveIntegerField(null=True),
+ ),
+ migrations.RemoveField(
+ model_name='task',
+ name='path',
+ ),
+ migrations.AddField(
+ model_name='task',
+ name='image_quality',
+ field=models.PositiveSmallIntegerField(default=50),
+ ),
+ migrations.CreateModel(
+ name='Plugin',
+ fields=[
+ ('name', models.SlugField(max_length=32, primary_key=True, serialize=False)),
+ ('description', cvat.apps.engine.models.SafeCharField(max_length=8192)),
+ ('created_at', models.DateTimeField(auto_now_add=True)),
+ ('updated_at', models.DateTimeField(auto_now_add=True)),
+ ('maintainer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='maintainers', to=settings.AUTH_USER_MODEL)),
+ ],
+ options={
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='PluginOption',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', cvat.apps.engine.models.SafeCharField(max_length=32)),
+ ('value', cvat.apps.engine.models.SafeCharField(max_length=1024)),
+ ('plugin', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Plugin')),
+ ],
+ ),
+ migrations.AlterUniqueTogether(
+ name='label',
+ unique_together={('task', 'name')},
+ ),
+ migrations.AlterUniqueTogether(
+ name='clientfile',
+ unique_together={('task', 'file')},
+ ),
+ migrations.AddField(
+ model_name='attributespec',
+ name='default_value',
+ field=models.CharField(default='', max_length=128),
+ preserve_default=False,
+ ),
+ migrations.AddField(
+ model_name='attributespec',
+ name='input_type',
+ field=models.CharField(choices=[('CHECKBOX', 'checkbox'), ('RADIO', 'radio'), ('NUMBER', 'number'), ('TEXT', 'text'), ('SELECT', 'select')], default='select', max_length=16),
+ preserve_default=False,
+ ),
+ migrations.AddField(
+ model_name='attributespec',
+ name='mutable',
+ field=models.BooleanField(default=True),
+ preserve_default=False,
+ ),
+ migrations.AddField(
+ model_name='attributespec',
+ name='name',
+ field=models.CharField(default='test', max_length=64),
+ preserve_default=False,
+ ),
+ migrations.AddField(
+ model_name='attributespec',
+ name='values',
+ field=models.CharField(default='', max_length=4096),
+ preserve_default=False,
+ ),
+ migrations.AlterField(
+ model_name='job',
+ name='status',
+ field=models.CharField(choices=[('ANNOTATION', 'annotation'), ('VALIDATION', 'validation'), ('COMPLETED', 'completed')], default=cvat.apps.engine.models.StatusChoice('annotation'), max_length=32),
+ ),
+ migrations.AlterField(
+ model_name='attributespec',
+ name='text',
+ field=models.CharField(default='', max_length=1024),
+ ),
+ migrations.AlterField(
+ model_name='attributespec',
+ name='input_type',
+ field=models.CharField(choices=[('checkbox', 'CHECKBOX'), ('radio', 'RADIO'), ('number', 'NUMBER'), ('text', 'TEXT'), ('select', 'SELECT')], max_length=16),
+ ),
+ migrations.AlterField(
+ model_name='task',
+ name='segment_size',
+ field=models.PositiveIntegerField(default=0),
+ ),
+ migrations.AlterField(
+ model_name='job',
+ name='status',
+ field=models.CharField(choices=[('annotation', 'ANNOTATION'), ('validation', 'VALIDATION'), ('completed', 'COMPLETED')], default=cvat.apps.engine.models.StatusChoice('annotation'), max_length=32),
+ ),
+ migrations.AlterField(
+ model_name='task',
+ name='status',
+ field=models.CharField(choices=[('annotation', 'ANNOTATION'), ('validation', 'VALIDATION'), ('completed', 'COMPLETED')], default=cvat.apps.engine.models.StatusChoice('annotation'), max_length=32),
+ ),
+ migrations.CreateModel(
+ name='Image',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('path', models.CharField(max_length=1024)),
+ ('frame', models.PositiveIntegerField()),
+ ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Task')),
+ ('height', models.PositiveIntegerField()),
+ ('width', models.PositiveIntegerField()),
+ ],
+ options={
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='Video',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('path', models.CharField(max_length=1024)),
+ ('start_frame', models.PositiveIntegerField()),
+ ('stop_frame', models.PositiveIntegerField()),
+ ('step', models.PositiveIntegerField(default=1)),
+ ('task', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='engine.Task')),
+ ('height', models.PositiveIntegerField()),
+ ('width', models.PositiveIntegerField()),
+ ],
+ options={
+ 'default_permissions': (),
+ },
+ ),
+ ]
diff --git a/cvat/apps/engine/migrations/0016_attribute_spec_20190217.py b/cvat/apps/engine/migrations/0016_attribute_spec_20190217.py
new file mode 100644
index 000000000000..dfb84fff98fd
--- /dev/null
+++ b/cvat/apps/engine/migrations/0016_attribute_spec_20190217.py
@@ -0,0 +1,172 @@
+import os
+import re
+import csv
+from io import StringIO
+from PIL import Image
+from django.db import migrations
+from django.conf import settings
+from cvat.apps.engine.task import _get_mime
+
+def parse_attribute(value):
+ match = re.match(r'^([~@])(\w+)=(\w+):(.+)?$', value)
+ if match:
+ prefix = match.group(1)
+ input_type = match.group(2)
+ name = match.group(3)
+ if match.group(4):
+ values = list(csv.reader(StringIO(match.group(4)),
+ quotechar="'"))[0]
+ else:
+ values = []
+
+ return {'prefix':prefix, 'type':input_type, 'name':name, 'values':values}
+ else:
+ return None
+
+def split_text_attribute(apps, schema_editor):
+ AttributeSpec = apps.get_model('engine', 'AttributeSpec')
+ for attribute in AttributeSpec.objects.all():
+ spec = parse_attribute(attribute.text)
+ if spec:
+ attribute.mutable = (spec['prefix'] == '~')
+ attribute.input_type = spec['type']
+ attribute.name = spec['name']
+ attribute.default_value = spec['values'][0] if spec['values'] else ''
+ attribute.values = '\n'.join(spec['values'])
+ attribute.save()
+
+def join_text_attribute(apps, schema_editor):
+ AttributeSpec = apps.get_model('engine', 'AttributeSpec')
+ for attribute in AttributeSpec.objects.all():
+ attribute.text = ""
+ if attribute.mutable:
+ attribute.text += "~"
+ else:
+ attribute.text += "@"
+
+ attribute.text += attribute.input_type
+ attribute.text += "=" + attribute.name + ":"
+ attribute.text += ",".join(attribute.values.split('\n'))
+ attribute.save()
+
+def _get_task_dirname(task_obj):
+ return os.path.join(settings.DATA_ROOT, str(task_obj.id))
+
+def _get_upload_dirname(task_obj):
+ return os.path.join(_get_task_dirname(task_obj), ".upload")
+
+def _get_frame_path(task_obj, frame):
+ return os.path.join(
+ _get_task_dirname(task_obj),
+ "data",
+ str(int(frame) // 10000),
+ str(int(frame) // 100),
+ str(frame) + '.jpg',
+ )
+
+def fill_task_meta_data_forward(apps, schema_editor):
+ db_alias = schema_editor.connection.alias
+ task_model = apps.get_model('engine', 'Task')
+ video_model = apps.get_model('engine', "Video")
+ image_model = apps.get_model('engine', 'Image')
+
+ for db_task in task_model.objects.all():
+ if db_task.mode == 'interpolation':
+ db_video = video_model()
+ db_video.task_id = db_task.id
+ db_video.start_frame = 0
+ db_video.stop_frame = db_task.size
+ db_video.step = 1
+
+ video = ""
+ for root, _, files in os.walk(_get_upload_dirname(db_task)):
+ fullnames = map(lambda f: os.path.join(root, f), files)
+ videos = list(filter(lambda x: _get_mime(x) == 'video', fullnames))
+ if len(videos):
+ video = videos[0]
+ break
+ db_video.path = video
+ try:
+ image = Image.open(_get_frame_path(db_task, 0))
+ db_video.width = image.width
+ db_video.height = image.height
+ image.close()
+ except FileNotFoundError:
+ db_video.width = 0
+ db_video.height = 0
+
+ db_video.save()
+ else:
+ filenames = []
+ for root, _, files in os.walk(_get_upload_dirname(db_task)):
+ fullnames = map(lambda f: os.path.join(root, f), files)
+ images = filter(lambda x: _get_mime(x) == 'image', fullnames)
+ filenames.extend(images)
+ filenames.sort()
+
+ db_images = []
+ for i, image_path in enumerate(filenames):
+ db_image = image_model()
+ db_image.task_id = db_task.id
+ db_image.path = image_path
+ db_image.frame = i
+ try:
+ image = Image.open(image_path)
+ db_image.width = image.width
+ db_image.height = image.height
+ image.close()
+ except FileNotFoundError:
+ db_image.width = 0
+ db_image.height = 0
+
+ db_images.append(db_image)
+ image_model.objects.using(db_alias).bulk_create(db_images)
+
+def fill_task_meta_data_backward(apps, schema_editor):
+ task_model = apps.get_model('engine', 'Task')
+ video_model = apps.get_model('engine', "Video")
+ image_model = apps.get_model('engine', 'Image')
+
+ for db_task in task_model.objects.all():
+ upload_dir = _get_upload_dirname(db_task)
+ if db_task.mode == 'interpolation':
+ video = video_model.objects.get(task__id=db_task.id)
+ db_task.source = os.path.relpath(video.path, upload_dir)
+ video.delete()
+ else:
+ images = image_model.objects.filter(task__id=db_task.id)
+ db_task.source = '{} images: {}, ...'.format(
+ len(images),
+ ", ".join([os.path.relpath(x.path, upload_dir) for x in images[0:2]])
+ )
+ images.delete()
+ db_task.save()
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('engine', '0015_db_redesign_20190217'),
+ ]
+
+ operations = [
+ migrations.RunPython(
+ code=split_text_attribute,
+ reverse_code=join_text_attribute,
+ ),
+ migrations.RemoveField(
+ model_name='attributespec',
+ name='text',
+ ),
+ migrations.AlterUniqueTogether(
+ name='attributespec',
+ unique_together={('label', 'name')},
+ ),
+ migrations.RunPython(
+ code=fill_task_meta_data_forward,
+ reverse_code=fill_task_meta_data_backward,
+ ),
+ migrations.RemoveField(
+ model_name='task',
+ name='source',
+ ),
+ ]
diff --git a/cvat/apps/engine/migrations/0017_db_redesign_20190221.py b/cvat/apps/engine/migrations/0017_db_redesign_20190221.py
new file mode 100644
index 000000000000..9c54bacd4930
--- /dev/null
+++ b/cvat/apps/engine/migrations/0017_db_redesign_20190221.py
@@ -0,0 +1,915 @@
+# Generated by Django 2.1.5 on 2019-02-21 12:25
+
+import cvat.apps.engine.models
+from django.db import migrations, models
+import django.db.models.deletion
+from django.conf import settings
+from cvat.apps.engine.annotation import _merge_table_rows
+
+# some modified functions to transfer annotations
+def _bulk_create(db_model, db_alias, objects, flt_param):
+ if objects:
+ if flt_param:
+ if 'postgresql' in settings.DATABASES["default"]["ENGINE"]:
+ return db_model.objects.using(db_alias).bulk_create(objects)
+ else:
+ ids = list(db_model.objects.using(db_alias).filter(**flt_param).values_list('id', flat=True))
+ db_model.objects.using(db_alias).bulk_create(objects)
+
+ return list(db_model.objects.using(db_alias).exclude(id__in=ids).filter(**flt_param))
+ else:
+ return db_model.objects.using(db_alias).bulk_create(objects)
+
+def get_old_db_shapes(shape_type, db_job):
+ def _get_shape_set(db_job, shape_type):
+ if shape_type == 'polygons':
+ return db_job.labeledpolygon_set
+ elif shape_type == 'polylines':
+ return db_job.labeledpolyline_set
+ elif shape_type == 'boxes':
+ return db_job.labeledbox_set
+ elif shape_type == 'points':
+ return db_job.labeledpoints_set
+
+ def get_values(shape_type):
+ if shape_type == 'polygons':
+ return [
+ ('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
+ 'labeledpolygonattributeval__value', 'labeledpolygonattributeval__spec_id',
+ 'labeledpolygonattributeval__id'), {
+ 'attributes': [
+ 'labeledpolygonattributeval__value',
+ 'labeledpolygonattributeval__spec_id',
+ 'labeledpolygonattributeval__id'
+ ]
+ }, 'labeledpolygonattributeval_set'
+ ]
+ elif shape_type == 'polylines':
+ return [
+ ('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
+ 'labeledpolylineattributeval__value', 'labeledpolylineattributeval__spec_id',
+ 'labeledpolylineattributeval__id'), {
+ 'attributes': [
+ 'labeledpolylineattributeval__value',
+ 'labeledpolylineattributeval__spec_id',
+ 'labeledpolylineattributeval__id'
+ ]
+ }, 'labeledpolylineattributeval_set'
+ ]
+ elif shape_type == 'boxes':
+ return [
+ ('id', 'frame', 'xtl', 'ytl', 'xbr', 'ybr', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
+ 'labeledboxattributeval__value', 'labeledboxattributeval__spec_id',
+ 'labeledboxattributeval__id'), {
+ 'attributes': [
+ 'labeledboxattributeval__value',
+ 'labeledboxattributeval__spec_id',
+ 'labeledboxattributeval__id'
+ ]
+ }, 'labeledboxattributeval_set'
+ ]
+ elif shape_type == 'points':
+ return [
+ ('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
+ 'labeledpointsattributeval__value', 'labeledpointsattributeval__spec_id',
+ 'labeledpointsattributeval__id'), {
+ 'attributes': [
+ 'labeledpointsattributeval__value',
+ 'labeledpointsattributeval__spec_id',
+ 'labeledpointsattributeval__id'
+ ]
+ }, 'labeledpointsattributeval_set'
+ ]
+ (values, merge_keys, prefetch) = get_values(shape_type)
+ db_shapes = list(_get_shape_set(db_job, shape_type).prefetch_related(prefetch).values(*values).order_by('frame'))
+ return _merge_table_rows(db_shapes, merge_keys, 'id')
+
+def get_old_db_paths(db_job):
+ db_paths = db_job.objectpath_set
+ for shape in ['trackedpoints_set', 'trackedbox_set', 'trackedpolyline_set', 'trackedpolygon_set']:
+ db_paths.prefetch_related(shape)
+ for shape_attr in ['trackedpoints_set__trackedpointsattributeval_set', 'trackedbox_set__trackedboxattributeval_set',
+ 'trackedpolygon_set__trackedpolygonattributeval_set', 'trackedpolyline_set__trackedpolylineattributeval_set']:
+ db_paths.prefetch_related(shape_attr)
+ db_paths.prefetch_related('objectpathattributeval_set')
+ db_paths = list (db_paths.values('id', 'frame', 'group_id', 'shapes', 'client_id', 'objectpathattributeval__spec_id',
+ 'objectpathattributeval__id', 'objectpathattributeval__value',
+ 'trackedbox', 'trackedpolygon', 'trackedpolyline', 'trackedpoints',
+ 'trackedbox__id', 'label_id', 'trackedbox__xtl', 'trackedbox__ytl',
+ 'trackedbox__xbr', 'trackedbox__ybr', 'trackedbox__frame', 'trackedbox__occluded',
+ 'trackedbox__z_order','trackedbox__outside', 'trackedbox__trackedboxattributeval__spec_id',
+ 'trackedbox__trackedboxattributeval__value', 'trackedbox__trackedboxattributeval__id',
+ 'trackedpolygon__id' ,'trackedpolygon__points', 'trackedpolygon__frame', 'trackedpolygon__occluded',
+ 'trackedpolygon__z_order', 'trackedpolygon__outside', 'trackedpolygon__trackedpolygonattributeval__spec_id',
+ 'trackedpolygon__trackedpolygonattributeval__value', 'trackedpolygon__trackedpolygonattributeval__id',
+ 'trackedpolyline__id', 'trackedpolyline__points', 'trackedpolyline__frame', 'trackedpolyline__occluded',
+ 'trackedpolyline__z_order', 'trackedpolyline__outside', 'trackedpolyline__trackedpolylineattributeval__spec_id',
+ 'trackedpolyline__trackedpolylineattributeval__value', 'trackedpolyline__trackedpolylineattributeval__id',
+ 'trackedpoints__id', 'trackedpoints__points', 'trackedpoints__frame', 'trackedpoints__occluded',
+ 'trackedpoints__z_order', 'trackedpoints__outside', 'trackedpoints__trackedpointsattributeval__spec_id',
+ 'trackedpoints__trackedpointsattributeval__value', 'trackedpoints__trackedpointsattributeval__id')
+ .order_by('id', 'trackedbox__frame', 'trackedpolygon__frame', 'trackedpolyline__frame', 'trackedpoints__frame'))
+
+ db_box_paths = list(filter(lambda path: path['shapes'] == 'boxes', db_paths ))
+ db_polygon_paths = list(filter(lambda path: path['shapes'] == 'polygons', db_paths ))
+ db_polyline_paths = list(filter(lambda path: path['shapes'] == 'polylines', db_paths ))
+ db_points_paths = list(filter(lambda path: path['shapes'] == 'points', db_paths ))
+
+ object_path_attr_merge_key = [
+ 'objectpathattributeval__value',
+ 'objectpathattributeval__spec_id',
+ 'objectpathattributeval__id'
+ ]
+
+ db_box_paths = _merge_table_rows(db_box_paths, {
+ 'attributes': object_path_attr_merge_key,
+ 'shapes': [
+ 'trackedbox__id', 'trackedbox__xtl', 'trackedbox__ytl',
+ 'trackedbox__xbr', 'trackedbox__ybr', 'trackedbox__frame',
+ 'trackedbox__occluded', 'trackedbox__z_order', 'trackedbox__outside',
+ 'trackedbox__trackedboxattributeval__value',
+ 'trackedbox__trackedboxattributeval__spec_id',
+ 'trackedbox__trackedboxattributeval__id'
+ ],
+ }, 'id')
+
+ db_polygon_paths = _merge_table_rows(db_polygon_paths, {
+ 'attributes': object_path_attr_merge_key,
+ 'shapes': [
+ 'trackedpolygon__id', 'trackedpolygon__points', 'trackedpolygon__frame',
+ 'trackedpolygon__occluded', 'trackedpolygon__z_order', 'trackedpolygon__outside',
+ 'trackedpolygon__trackedpolygonattributeval__value',
+ 'trackedpolygon__trackedpolygonattributeval__spec_id',
+ 'trackedpolygon__trackedpolygonattributeval__id'
+ ]
+ }, 'id')
+
+ db_polyline_paths = _merge_table_rows(db_polyline_paths, {
+ 'attributes': object_path_attr_merge_key,
+ 'shapes': [
+ 'trackedpolyline__id', 'trackedpolyline__points', 'trackedpolyline__frame',
+ 'trackedpolyline__occluded', 'trackedpolyline__z_order', 'trackedpolyline__outside',
+ 'trackedpolyline__trackedpolylineattributeval__value',
+ 'trackedpolyline__trackedpolylineattributeval__spec_id',
+ 'trackedpolyline__trackedpolylineattributeval__id'
+ ],
+ }, 'id')
+
+ db_points_paths = _merge_table_rows(db_points_paths, {
+ 'attributes': object_path_attr_merge_key,
+ 'shapes': [
+ 'trackedpoints__id', 'trackedpoints__points', 'trackedpoints__frame',
+ 'trackedpoints__occluded', 'trackedpoints__z_order', 'trackedpoints__outside',
+ 'trackedpoints__trackedpointsattributeval__value',
+ 'trackedpoints__trackedpointsattributeval__spec_id',
+ 'trackedpoints__trackedpointsattributeval__id'
+ ]
+ }, 'id')
+
+ for db_box_path in db_box_paths:
+ db_box_path.attributes = list(set(db_box_path.attributes))
+ db_box_path.type = 'box_path'
+ db_box_path.shapes = _merge_table_rows(db_box_path.shapes, {
+ 'attributes': [
+ 'trackedboxattributeval__value',
+ 'trackedboxattributeval__spec_id',
+ 'trackedboxattributeval__id'
+ ]
+ }, 'id')
+
+ for db_polygon_path in db_polygon_paths:
+ db_polygon_path.attributes = list(set(db_polygon_path.attributes))
+ db_polygon_path.type = 'polygon_path'
+ db_polygon_path.shapes = _merge_table_rows(db_polygon_path.shapes, {
+ 'attributes': [
+ 'trackedpolygonattributeval__value',
+ 'trackedpolygonattributeval__spec_id',
+ 'trackedpolygonattributeval__id'
+ ]
+ }, 'id')
+
+ for db_polyline_path in db_polyline_paths:
+ db_polyline_path.attributes = list(set(db_polyline_path.attributes))
+ db_polyline_path.type = 'polyline_path'
+ db_polyline_path.shapes = _merge_table_rows(db_polyline_path.shapes, {
+ 'attributes': [
+ 'trackedpolylineattributeval__value',
+ 'trackedpolylineattributeval__spec_id',
+ 'trackedpolylineattributeval__id'
+ ]
+ }, 'id')
+
+ for db_points_path in db_points_paths:
+ db_points_path.attributes = list(set(db_points_path.attributes))
+ db_points_path.type = 'points_path'
+ db_points_path.shapes = _merge_table_rows(db_points_path.shapes, {
+ 'attributes': [
+ 'trackedpointsattributeval__value',
+ 'trackedpointsattributeval__spec_id',
+ 'trackedpointsattributeval__id'
+ ]
+ }, 'id')
+ return db_box_paths + db_polygon_paths + db_polyline_paths + db_points_paths
+
+def process_shapes(db_job, apps, db_labels, db_attributes, db_alias):
+ LabeledShape = apps.get_model('engine', 'LabeledShape')
+ LabeledShapeAttributeVal = apps.get_model('engine', 'LabeledShapeAttributeVal')
+ new_db_shapes = []
+ new_db_attrvals = []
+ for shape_type in ['boxes', 'points', 'polygons', 'polylines']:
+ for shape in get_old_db_shapes(shape_type, db_job):
+ new_db_shape = LabeledShape()
+ new_db_shape.job = db_job
+ new_db_shape.label = db_labels[shape.label_id]
+ new_db_shape.group = shape.group_id
+
+ if shape_type == 'boxes':
+ new_db_shape.type = cvat.apps.engine.models.ShapeType.RECTANGLE
+ new_db_shape.points = [shape.xtl, shape.ytl, shape.xbr, shape.ybr]
+ else:
+ new_db_shape.points = shape.points.replace(',', ' ').split()
+ if shape_type == 'points':
+ new_db_shape.type = cvat.apps.engine.models.ShapeType.POINTS
+ elif shape_type == 'polygons':
+ new_db_shape.type = cvat.apps.engine.models.ShapeType.POLYGON
+ elif shape_type == 'polylines':
+ new_db_shape.type = cvat.apps.engine.models.ShapeType.POLYLINE
+
+ new_db_shape.frame = shape.frame
+ new_db_shape.occluded = shape.occluded
+ new_db_shape.z_order = shape.z_order
+
+ for attr in shape.attributes:
+ db_attrval = LabeledShapeAttributeVal()
+ db_attrval.shape_id = len(new_db_shapes)
+ db_attrval.spec = db_attributes[attr.spec_id]
+ db_attrval.value = attr.value
+ new_db_attrvals.append(db_attrval)
+
+ new_db_shapes.append(new_db_shape)
+
+ new_db_shapes = _bulk_create(LabeledShape, db_alias, new_db_shapes, {"job_id": db_job.id})
+ for db_attrval in new_db_attrvals:
+ db_attrval.shape_id = new_db_shapes[db_attrval.shape_id].id
+
+ _bulk_create(LabeledShapeAttributeVal, db_alias, new_db_attrvals, {})
+
+def process_paths(db_job, apps, db_labels, db_attributes, db_alias):
+ TrackedShape = apps.get_model('engine', 'TrackedShape')
+ LabeledTrack = apps.get_model('engine', 'LabeledTrack')
+ LabeledTrackAttributeVal = apps.get_model('engine', 'LabeledTrackAttributeVal')
+ TrackedShapeAttributeVal = apps.get_model('engine', 'TrackedShapeAttributeVal')
+ tracks = get_old_db_paths(db_job)
+
+ new_db_tracks = []
+ new_db_track_attrvals = []
+ new_db_shapes = []
+ new_db_shape_attrvals = []
+
+ for track in tracks:
+ db_track = LabeledTrack()
+ db_track.job = db_job
+ db_track.label = db_labels[track.label_id]
+ db_track.frame = track.frame
+ db_track.group = track.group_id
+
+ for attr in track.attributes:
+ db_attrspec = db_attributes[attr.spec_id]
+ db_attrval = LabeledTrackAttributeVal()
+ db_attrval.track_id = len(new_db_tracks)
+ db_attrval.spec = db_attrspec
+ db_attrval.value = attr.value
+ new_db_track_attrvals.append(db_attrval)
+
+ for shape in track.shapes:
+ db_shape = TrackedShape()
+ db_shape.track_id = len(new_db_tracks)
+ db_shape.frame = shape.frame
+ db_shape.occluded = shape.occluded
+ db_shape.z_order = shape.z_order
+ db_shape.outside = shape.outside
+ if track.type == 'box_path':
+ db_shape.type = cvat.apps.engine.models.ShapeType.RECTANGLE
+ db_shape.points = [shape.xtl, shape.ytl, shape.xbr, shape.ybr]
+ else:
+ db_shape.points = shape.points.replace(',', ' ').split()
+ if track.type == 'points_path':
+ db_shape.type = cvat.apps.engine.models.ShapeType.POINTS
+ elif track.type == 'polygon_path':
+ db_shape.type = cvat.apps.engine.models.ShapeType.POLYGON
+ elif track.type == 'polyline_path':
+ db_shape.type = cvat.apps.engine.models.ShapeType.POLYLINE
+
+ for attr in shape.attributes:
+ db_attrspec = db_attributes[attr.spec_id]
+ db_attrval = TrackedShapeAttributeVal()
+ db_attrval.shape_id = len(new_db_shapes)
+ db_attrval.spec = db_attrspec
+ db_attrval.value = attr.value
+ new_db_shape_attrvals.append(db_attrval)
+
+ new_db_shapes.append(db_shape)
+ new_db_tracks.append(db_track)
+
+ new_db_tracks = _bulk_create(LabeledTrack, db_alias, new_db_tracks, {"job_id": db_job.id})
+
+ for db_attrval in new_db_track_attrvals:
+ db_attrval.track_id = new_db_tracks[db_attrval.track_id].id
+ _bulk_create(LabeledTrackAttributeVal, db_alias, new_db_track_attrvals, {})
+
+ for db_shape in new_db_shapes:
+ db_shape.track_id = new_db_tracks[db_shape.track_id].id
+
+ new_db_shapes = _bulk_create(TrackedShape, db_alias, new_db_shapes, {"track__job_id": db_job.id})
+
+ for db_attrval in new_db_shape_attrvals:
+ db_attrval.shape_id = new_db_shapes[db_attrval.shape_id].id
+
+ _bulk_create(TrackedShapeAttributeVal, db_alias, new_db_shape_attrvals, {})
+
+def copy_annotations_forward(apps, schema_editor):
+ db_alias = schema_editor.connection.alias
+ Task = apps.get_model('engine', 'Task')
+ AttributeSpec = apps.get_model('engine', 'AttributeSpec')
+
+
+ for task in Task.objects.all():
+ print("run anno migration for the task {}".format(task.id))
+ db_labels = {db_label.id:db_label for db_label in task.label_set.all()}
+ db_attributes = {db_attr.id:db_attr for db_attr in AttributeSpec.objects.filter(label__task__id=task.id)}
+ for segment in task.segment_set.prefetch_related('job_set').all():
+ db_job = segment.job_set.first()
+ print("run anno migration for the job {}".format(db_job.id))
+ process_shapes(db_job, apps, db_labels, db_attributes, db_alias)
+ process_paths(db_job, apps, db_labels, db_attributes, db_alias)
+
+def _save_old_shapes_to_db(apps, db_shapes, db_attributes, db_alias, db_job):
+ def _get_shape_class(shape_type):
+ if shape_type == 'polygons':
+ return apps.get_model('engine', 'LabeledPolygon')
+ elif shape_type == 'polylines':
+ return apps.get_model('engine', 'LabeledPolyline')
+ elif shape_type == 'boxes':
+ return apps.get_model('engine', 'LabeledBox')
+ elif shape_type == 'points':
+ return apps.get_model('engine', 'LabeledPoints')
+
+ def _get_shape_attr_class(shape_type):
+ if shape_type == 'polygons':
+ return apps.get_model('engine', 'LabeledPolygonAttributeVal')
+ elif shape_type == 'polylines':
+ return apps.get_model('engine', 'LabeledPolylineAttributeVal')
+ elif shape_type == 'boxes':
+ return apps.get_model('engine', 'LabeledBoxAttributeVal')
+ elif shape_type == 'points':
+ return apps.get_model('engine', 'LabeledPointsAttributeVal')
+
+ shapes = [
+ list(filter(lambda s: s.type == cvat.apps.engine.models.ShapeType.RECTANGLE, db_shapes)),
+ list(filter(lambda s: s.type == cvat.apps.engine.models.ShapeType.POLYLINE, db_shapes)),
+ list(filter(lambda s: s.type == cvat.apps.engine.models.ShapeType.POLYGON, db_shapes)),
+ list(filter(lambda s: s.type == cvat.apps.engine.models.ShapeType.POINTS, db_shapes)),
+ ]
+ for i, shape_type in enumerate(['boxes', 'polylines', 'polygons', 'points']):
+ new_db_shapes = []
+ new_db_attrvals = []
+ for shape in shapes[i]:
+ db_shape = _get_shape_class(shape_type)()
+ db_shape.job = shape.job
+ db_shape.label = shape.label
+ db_shape.group_id = shape.group
+ if shape.type == cvat.apps.engine.models.ShapeType.RECTANGLE:
+ db_shape.xtl = shape.points[0]
+ db_shape.ytl = shape.points[1]
+ db_shape.xbr = shape.points[2]
+ db_shape.ybr = shape.points[3]
+ else:
+ point_iterator = iter(shape.points)
+ db_shape.points = ' '.join(['{},{}'.format(point, next(point_iterator)) for point in point_iterator])
+ db_shape.frame = shape.frame
+ db_shape.occluded = shape.occluded
+ db_shape.z_order = shape.z_order
+
+ for attr in list(shape.labeledshapeattributeval_set.all()):
+ db_attrval = _get_shape_attr_class(shape_type)()
+ if shape.type == cvat.apps.engine.models.ShapeType.POLYGON:
+ db_attrval.polygon_id = len(new_db_shapes)
+ elif shape.type == cvat.apps.engine.models.ShapeType.POLYLINE:
+ db_attrval.polyline_id = len(new_db_shapes)
+ elif shape.type == cvat.apps.engine.models.ShapeType.RECTANGLE:
+ db_attrval.box_id = len(new_db_shapes)
+ else:
+ db_attrval.points_id = len(new_db_shapes)
+
+ db_attrval.spec = db_attributes[attr.spec_id]
+ db_attrval.value = attr.value
+ new_db_attrvals.append(db_attrval)
+
+ new_db_shapes.append(db_shape)
+
+ new_db_shapes = _bulk_create(_get_shape_class(shape_type), db_alias, new_db_shapes, {"job_id": db_job.id})
+
+ for db_attrval in new_db_attrvals:
+ if shape_type == 'polygons':
+ db_attrval.polygon_id = new_db_shapes[db_attrval.polygon_id].id
+ elif shape_type == 'polylines':
+ db_attrval.polyline_id = new_db_shapes[db_attrval.polyline_id].id
+ elif shape_type == 'boxes':
+ db_attrval.box_id = new_db_shapes[db_attrval.box_id].id
+ else:
+ db_attrval.points_id = new_db_shapes[db_attrval.points_id].id
+
+ _bulk_create(_get_shape_attr_class(shape_type), db_alias, new_db_attrvals, {})
+
+def _save_old_tracks_to_db(apps, db_shapes, db_attributes, db_alias, db_job):
+ def _get_shape_class(shape_type):
+ if shape_type == 'polygon_paths':
+ return apps.get_model('engine', 'TrackedPolygon')
+ elif shape_type == 'polyline_paths':
+ return apps.get_model('engine', 'TrackedPolyline')
+ elif shape_type == 'box_paths':
+ return apps.get_model('engine', 'TrackedBox')
+ elif shape_type == 'points_paths':
+ return apps.get_model('engine', 'TrackedPoints')
+
+ def _get_shape_attr_class(shape_type):
+ if shape_type == 'polygon_paths':
+ return apps.get_model('engine', 'TrackedPolygonAttributeVal')
+ elif shape_type == 'polyline_paths':
+ return apps.get_model('engine', 'TrackedPolylineAttributeVal')
+ elif shape_type == 'box_paths':
+ return apps.get_model('engine', 'TrackedBoxAttributeVal')
+ elif shape_type == 'points_paths':
+ return apps.get_model('engine', 'TrackedPointsAttributeVal')
+
+ tracks = [
+ list(filter(lambda t: t.trackedshape_set.first().type == cvat.apps.engine.models.ShapeType.RECTANGLE, db_shapes)),
+ list(filter(lambda t: t.trackedshape_set.first().type == cvat.apps.engine.models.ShapeType.POLYLINE, db_shapes)),
+ list(filter(lambda t: t.trackedshape_set.first().type == cvat.apps.engine.models.ShapeType.POLYGON, db_shapes)),
+ list(filter(lambda t: t.trackedshape_set.first().type == cvat.apps.engine.models.ShapeType.POINTS, db_shapes)),
+ ]
+
+ ObjectPath = apps.get_model('engine', 'ObjectPath')
+ ObjectPathAttributeVal = apps.get_model('engine', 'ObjectPathAttributeVal')
+
+ for i, shape_type in enumerate(['box_paths', 'polyline_paths', 'polygon_paths', 'points_paths', ]):
+ new_db_paths = []
+ new_db_path_attrvals = []
+ new_db_shapes = []
+ new_db_shape_attrvals = []
+
+ for path in tracks[i]:
+ db_path = ObjectPath()
+ db_path.job = db_job
+ db_path.label = path.label
+ db_path.frame = path.frame
+ db_path.group_id = path.group
+ # db_path.client_id = path.client_id
+ if shape_type == 'polygon_paths':
+ db_path.shapes = 'polygons'
+ elif shape_type == 'polyline_paths':
+ db_path.shapes = 'polylines'
+ elif shape_type == 'box_paths':
+ db_path.shapes = 'boxes'
+ elif shape_type == 'points_paths':
+ db_path.shapes = 'points'
+
+ for attr in list(path.labeledtrackattributeval_set.all()):
+ db_attrspec = db_attributes[attr.spec_id]
+ db_attrval = ObjectPathAttributeVal()
+ db_attrval.track_id = len(new_db_paths)
+ db_attrval.spec = db_attrspec
+ db_attrval.value = attr.value
+ new_db_path_attrvals.append(db_attrval)
+
+ for shape in list(path.trackedshape_set.all()):
+ db_shape = _get_shape_class(shape_type)()
+ db_shape.track_id = len(new_db_paths)
+ if shape_type == 'box_paths':
+ db_shape.xtl = shape.points[0]
+ db_shape.ytl = shape.points[1]
+ db_shape.xbr = shape.points[2]
+ db_shape.ybr = shape.points[3]
+ else:
+ point_iterator = iter(shape.points)
+ db_shape.points = ' '.join(['{},{}'.format(point, next(point_iterator)) for point in point_iterator])
+
+ db_shape.frame = shape.frame
+ db_shape.occluded = shape.occluded
+ db_shape.z_order = shape.z_order
+ db_shape.outside = shape.outside
+
+ for attr in list(shape.trackedshapeattributeval_set.all()):
+ db_attrspec = db_attributes[attr.spec_id]
+ db_attrval = _get_shape_attr_class(shape_type)()
+ if shape_type == 'polygon_paths':
+ db_attrval.polygon_id = len(new_db_shapes)
+ elif shape_type == 'polyline_paths':
+ db_attrval.polyline_id = len(new_db_shapes)
+ elif shape_type == 'box_paths':
+ db_attrval.box_id = len(new_db_shapes)
+ elif shape_type == 'points_paths':
+ db_attrval.points_id = len(new_db_shapes)
+ db_attrval.spec = db_attrspec
+ db_attrval.value = attr.value
+ new_db_shape_attrvals.append(db_attrval)
+
+ new_db_shapes.append(db_shape)
+ new_db_paths.append(db_path)
+
+ new_db_paths = _bulk_create(ObjectPath, db_alias, new_db_paths, {"job_id": db_job.id})
+
+ for db_attrval in new_db_path_attrvals:
+ db_attrval.track_id = new_db_paths[db_attrval.track_id].id
+ _bulk_create(ObjectPathAttributeVal, db_alias, new_db_path_attrvals, {})
+
+ for db_shape in new_db_shapes:
+ db_shape.track_id = new_db_paths[db_shape.track_id].id
+
+ db_shapes = _bulk_create(_get_shape_class(shape_type), db_alias, new_db_shapes, {"track__job_id": db_job.id})
+
+ for db_attrval in new_db_shape_attrvals:
+ if shape_type == 'polygon_paths':
+ db_attrval.polygon_id = db_shapes[db_attrval.polygon_id].id
+ elif shape_type == 'polyline_paths':
+ db_attrval.polyline_id = db_shapes[db_attrval.polyline_id].id
+ elif shape_type == 'box_paths':
+ db_attrval.box_id = db_shapes[db_attrval.box_id].id
+ elif shape_type == 'points_paths':
+ db_attrval.points_id = db_shapes[db_attrval.points_id].id
+
+ _bulk_create(_get_shape_attr_class(shape_type), db_alias, new_db_shape_attrvals, {})
+
+def copy_annotations_backward(apps, schema_editor):
+ Task = apps.get_model('engine', 'Task')
+ AttributeSpec = apps.get_model('engine', 'AttributeSpec')
+ db_alias = schema_editor.connection.alias
+
+ for task in Task.objects.all():
+ db_attributes = {db_attr.id:db_attr for db_attr in AttributeSpec.objects.filter(label__task__id=task.id)}
+ for segment in task.segment_set.prefetch_related('job_set').all():
+ db_job = segment.job_set.first()
+
+ db_shapes = list(db_job.labeledshape_set
+ .prefetch_related("label")
+ .prefetch_related("labeledshapeattributeval_set"))
+ _save_old_shapes_to_db(apps, db_shapes, db_attributes, db_alias, db_job)
+
+ db_tracks = list(db_job.labeledtrack_set
+ .select_related("label")
+ .prefetch_related("labeledtrackattributeval_set")
+ .prefetch_related("trackedshape_set__trackedshapeattributeval_set"))
+ _save_old_tracks_to_db(apps, db_tracks, db_attributes, db_alias, db_job)
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('engine', '0016_attribute_spec_20190217'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='LabeledImageAttributeVal',
+ fields=[
+ ('id', models.BigAutoField(primary_key=True, serialize=False)),
+ ('value', cvat.apps.engine.models.SafeCharField(max_length=64)),
+ ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.AttributeSpec')),
+ ],
+ options={
+ 'abstract': False,
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='LabeledShapeAttributeVal',
+ fields=[
+ ('id', models.BigAutoField(primary_key=True, serialize=False)),
+ ('value', cvat.apps.engine.models.SafeCharField(max_length=64)),
+ ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.AttributeSpec')),
+ ],
+ options={
+ 'abstract': False,
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='LabeledTrackAttributeVal',
+ fields=[
+ ('id', models.BigAutoField(primary_key=True, serialize=False)),
+ ('value', cvat.apps.engine.models.SafeCharField(max_length=64)),
+ ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.AttributeSpec')),
+ ],
+ options={
+ 'abstract': False,
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='TrackedShape',
+ fields=[
+ ('type', models.CharField(choices=[('rectangle', 'RECTANGLE'), ('polygon', 'POLYGON'), ('polyline', 'POLYLINE'), ('points', 'POINTS')], max_length=16)),
+ ('occluded', models.BooleanField(default=False)),
+ ('z_order', models.IntegerField(default=0)),
+ ('points', cvat.apps.engine.models.FloatArrayField()),
+ ('id', models.BigAutoField(primary_key=True, serialize=False)),
+ ('frame', models.PositiveIntegerField()),
+ ('outside', models.BooleanField(default=False)),
+ ],
+ options={
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='TrackedShapeAttributeVal',
+ fields=[
+ ('id', models.BigAutoField(primary_key=True, serialize=False)),
+ ('value', cvat.apps.engine.models.SafeCharField(max_length=64)),
+ ('shape', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.TrackedShape')),
+ ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.AttributeSpec')),
+ ],
+ options={
+ 'abstract': False,
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='LabeledImage',
+ fields=[
+ ('id', models.BigAutoField(primary_key=True, serialize=False)),
+ ('frame', models.PositiveIntegerField()),
+ ('group', models.PositiveIntegerField(null=True)),
+ ],
+ options={
+ 'abstract': False,
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='LabeledShape',
+ fields=[
+ ('id', models.BigAutoField(primary_key=True, serialize=False)),
+ ('frame', models.PositiveIntegerField()),
+ ('group', models.PositiveIntegerField(null=True)),
+ ('type', models.CharField(choices=[('rectangle', 'RECTANGLE'), ('polygon', 'POLYGON'), ('polyline', 'POLYLINE'), ('points', 'POINTS')], max_length=16)),
+ ('occluded', models.BooleanField(default=False)),
+ ('z_order', models.IntegerField(default=0)),
+ ('points', cvat.apps.engine.models.FloatArrayField()),
+ ],
+ options={
+ 'abstract': False,
+ 'default_permissions': (),
+ },
+ ),
+ migrations.CreateModel(
+ name='LabeledTrack',
+ fields=[
+ ('id', models.BigAutoField(primary_key=True, serialize=False)),
+ ('frame', models.PositiveIntegerField()),
+ ('group', models.PositiveIntegerField(null=True)),
+ ],
+ options={
+ 'abstract': False,
+ 'default_permissions': (),
+ },
+ ),
+ migrations.AddField(
+ model_name='labeledimage',
+ name='job',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Job'),
+ ),
+ migrations.AddField(
+ model_name='labeledtrack',
+ name='job',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Job'),
+ ),
+ migrations.AddField(
+ model_name='labeledshape',
+ name='job',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Job'),
+ ),
+ migrations.AddField(
+ model_name='labeledimage',
+ name='label',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Label'),
+ ),
+ migrations.AddField(
+ model_name='labeledshape',
+ name='label',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Label'),
+ ),
+ migrations.AddField(
+ model_name='labeledtrack',
+ name='label',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Label'),
+ ),
+ migrations.AddField(
+ model_name='trackedshape',
+ name='track',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.LabeledTrack'),
+ ),
+ migrations.AddField(
+ model_name='labeledtrackattributeval',
+ name='track',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.LabeledTrack'),
+ ),
+ migrations.AddField(
+ model_name='labeledshapeattributeval',
+ name='shape',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.LabeledShape'),
+ ),
+ migrations.AddField(
+ model_name='labeledimageattributeval',
+ name='image',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.LabeledImage'),
+ ),
+ migrations.RunPython(
+ code=copy_annotations_forward,
+ reverse_code=copy_annotations_backward,
+ ),
+ migrations.RemoveField(
+ model_name='labeledbox',
+ name='job',
+ ),
+ migrations.RemoveField(
+ model_name='labeledbox',
+ name='label',
+ ),
+ migrations.RemoveField(
+ model_name='labeledboxattributeval',
+ name='box',
+ ),
+ migrations.RemoveField(
+ model_name='labeledboxattributeval',
+ name='spec',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpoints',
+ name='job',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpoints',
+ name='label',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpointsattributeval',
+ name='points',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpointsattributeval',
+ name='spec',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpolygon',
+ name='job',
+ ),
+ migrations.RemoveField(
+ model_name='job',
+ name='max_shape_id',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpolygon',
+ name='label',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpolygonattributeval',
+ name='polygon',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpolygonattributeval',
+ name='spec',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpolyline',
+ name='job',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpolyline',
+ name='label',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpolylineattributeval',
+ name='polyline',
+ ),
+ migrations.RemoveField(
+ model_name='labeledpolylineattributeval',
+ name='spec',
+ ),
+ migrations.RemoveField(
+ model_name='objectpath',
+ name='job',
+ ),
+ migrations.RemoveField(
+ model_name='objectpath',
+ name='label',
+ ),
+ migrations.RemoveField(
+ model_name='objectpathattributeval',
+ name='spec',
+ ),
+ migrations.RemoveField(
+ model_name='objectpathattributeval',
+ name='track',
+ ),
+ migrations.RemoveField(
+ model_name='trackedbox',
+ name='track',
+ ),
+ migrations.RemoveField(
+ model_name='trackedboxattributeval',
+ name='box',
+ ),
+ migrations.RemoveField(
+ model_name='trackedboxattributeval',
+ name='spec',
+ ),
+ migrations.RemoveField(
+ model_name='trackedpoints',
+ name='track',
+ ),
+ migrations.RemoveField(
+ model_name='trackedpointsattributeval',
+ name='points',
+ ),
+ migrations.RemoveField(
+ model_name='trackedpointsattributeval',
+ name='spec',
+ ),
+ migrations.RemoveField(
+ model_name='trackedpolygon',
+ name='track',
+ ),
+ migrations.RemoveField(
+ model_name='trackedpolygonattributeval',
+ name='polygon',
+ ),
+ migrations.RemoveField(
+ model_name='trackedpolygonattributeval',
+ name='spec',
+ ),
+ migrations.RemoveField(
+ model_name='trackedpolyline',
+ name='track',
+ ),
+ migrations.RemoveField(
+ model_name='trackedpolylineattributeval',
+ name='polyline',
+ ),
+ migrations.RemoveField(
+ model_name='trackedpolylineattributeval',
+ name='spec',
+ ),
+ migrations.DeleteModel(
+ name='LabeledBox',
+ ),
+ migrations.DeleteModel(
+ name='LabeledBoxAttributeVal',
+ ),
+ migrations.DeleteModel(
+ name='LabeledPoints',
+ ),
+ migrations.DeleteModel(
+ name='LabeledPointsAttributeVal',
+ ),
+ migrations.DeleteModel(
+ name='LabeledPolygon',
+ ),
+ migrations.DeleteModel(
+ name='LabeledPolygonAttributeVal',
+ ),
+ migrations.DeleteModel(
+ name='LabeledPolyline',
+ ),
+ migrations.DeleteModel(
+ name='LabeledPolylineAttributeVal',
+ ),
+ migrations.DeleteModel(
+ name='ObjectPath',
+ ),
+ migrations.DeleteModel(
+ name='ObjectPathAttributeVal',
+ ),
+ migrations.DeleteModel(
+ name='TrackedBox',
+ ),
+ migrations.DeleteModel(
+ name='TrackedBoxAttributeVal',
+ ),
+ migrations.DeleteModel(
+ name='TrackedPoints',
+ ),
+ migrations.DeleteModel(
+ name='TrackedPointsAttributeVal',
+ ),
+ migrations.DeleteModel(
+ name='TrackedPolygon',
+ ),
+ migrations.DeleteModel(
+ name='TrackedPolygonAttributeVal',
+ ),
+ migrations.DeleteModel(
+ name='TrackedPolyline',
+ ),
+ migrations.DeleteModel(
+ name='TrackedPolylineAttributeVal',
+ ),
+ ]
diff --git a/cvat/apps/engine/migrations/0018_jobcommit.py b/cvat/apps/engine/migrations/0018_jobcommit.py
new file mode 100644
index 000000000000..c526cb896435
--- /dev/null
+++ b/cvat/apps/engine/migrations/0018_jobcommit.py
@@ -0,0 +1,31 @@
+# Generated by Django 2.1.7 on 2019-04-17 09:25
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+ ('engine', '0017_db_redesign_20190221'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='JobCommit',
+ fields=[
+ ('id', models.BigAutoField(primary_key=True, serialize=False)),
+ ('version', models.PositiveIntegerField(default=0)),
+ ('timestamp', models.DateTimeField(auto_now=True)),
+ ('message', models.CharField(default='', max_length=4096)),
+ ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
+ ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='commits', to='engine.Job')),
+ ],
+ options={
+ 'abstract': False,
+ 'default_permissions': (),
+ },
+ ),
+ ]
diff --git a/cvat/apps/engine/models.py b/cvat/apps/engine/models.py
index 1d62f02da38d..736fd6b0f2bc 100644
--- a/cvat/apps/engine/models.py
+++ b/cvat/apps/engine/models.py
@@ -2,42 +2,39 @@
#
# SPDX-License-Identifier: MIT
+from enum import Enum
+
+import shlex
+import os
+
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
+from django.core.files.storage import FileSystemStorage
-from io import StringIO
-from enum import Enum
-
-import shlex
-import csv
-import re
-import os
+class SafeCharField(models.CharField):
+ def get_prep_value(self, value):
+ value = super().get_prep_value(value)
+ if value:
+ return value[:self.max_length]
+ return value
-class StatusChoice(Enum):
+class StatusChoice(str, Enum):
ANNOTATION = 'annotation'
VALIDATION = 'validation'
COMPLETED = 'completed'
@classmethod
def choices(self):
- return tuple((x.name, x.value) for x in self)
+ return tuple((x.value, x.name) for x in self)
def __str__(self):
return self.value
-class SafeCharField(models.CharField):
- def get_prep_value(self, value):
- value = super().get_prep_value(value)
- if value:
- return value[:self.max_length]
- return value
-
class Task(models.Model):
name = SafeCharField(max_length=256)
size = models.PositiveIntegerField()
- path = models.CharField(max_length=256)
mode = models.CharField(max_length=32)
owner = models.ForeignKey(User, null=True, blank=True,
on_delete=models.SET_NULL, related_name="owners")
@@ -46,45 +43,110 @@ class Task(models.Model):
bug_tracker = models.CharField(max_length=2000, blank=True, default="")
created_date = models.DateTimeField(auto_now_add=True)
updated_date = models.DateTimeField(auto_now_add=True)
- overlap = models.PositiveIntegerField(default=0)
+ overlap = models.PositiveIntegerField(null=True)
+ # Zero means that there are no limits (default)
+ segment_size = models.PositiveIntegerField(default=0)
z_order = models.BooleanField(default=False)
flipped = models.BooleanField(default=False)
- source = SafeCharField(max_length=256, default="unknown")
- status = models.CharField(max_length=32, default=StatusChoice.ANNOTATION)
+ image_quality = models.PositiveSmallIntegerField(default=50)
+ status = models.CharField(max_length=32, choices=StatusChoice.choices(),
+ default=StatusChoice.ANNOTATION)
# Extend default permission model
class Meta:
default_permissions = ()
+ def get_frame_path(self, frame):
+ d1 = str(int(frame) // 10000)
+ d2 = str(int(frame) // 100)
+ path = os.path.join(self.get_data_dirname(), d1, d2,
+ str(frame) + '.jpg')
+
+ return path
+
def get_upload_dirname(self):
- return os.path.join(self.path, ".upload")
+ return os.path.join(self.get_task_dirname(), ".upload")
def get_data_dirname(self):
- return os.path.join(self.path, "data")
-
- def get_dump_path(self):
- name = re.sub(r'[\\/*?:"<>|]', '_', self.name)
- return os.path.join(self.path, "{}.xml".format(name))
+ return os.path.join(self.get_task_dirname(), "data")
def get_log_path(self):
- return os.path.join(self.path, "task.log")
+ return os.path.join(self.get_task_dirname(), "task.log")
def get_client_log_path(self):
- return os.path.join(self.path, "client.log")
+ return os.path.join(self.get_task_dirname(), "client.log")
def get_image_meta_cache_path(self):
- return os.path.join(self.path, "image_meta.cache")
-
- def set_task_dirname(self, path):
- self.path = path
- self.save(update_fields=['path'])
+ return os.path.join(self.get_task_dirname(), "image_meta.cache")
def get_task_dirname(self):
- return self.path
+ return os.path.join(settings.DATA_ROOT, str(self.id))
def __str__(self):
return self.name
+# Redefined a couple of operations for FileSystemStorage to avoid renaming
+# or other side effects.
+class MyFileSystemStorage(FileSystemStorage):
+ def get_valid_name(self, name):
+ return name
+
+ def get_available_name(self, name, max_length=None):
+ if self.exists(name) or (max_length and len(name) > max_length):
+ raise IOError('`{}` file already exists or its name is too long'.format(name))
+ return name
+
+def upload_path_handler(instance, filename):
+ return os.path.join(instance.task.get_upload_dirname(), filename)
+
+# For client files which the user has uploaded
+class ClientFile(models.Model):
+ task = models.ForeignKey(Task, on_delete=models.CASCADE)
+ file = models.FileField(upload_to=upload_path_handler,
+ max_length=1024, storage=MyFileSystemStorage())
+
+ class Meta:
+ default_permissions = ()
+ unique_together = ("task", "file")
+
+# For server files on the mounted share
+class ServerFile(models.Model):
+ task = models.ForeignKey(Task, on_delete=models.CASCADE)
+ file = models.CharField(max_length=1024)
+
+ class Meta:
+ default_permissions = ()
+
+# For URLs
+class RemoteFile(models.Model):
+ task = models.ForeignKey(Task, on_delete=models.CASCADE)
+ file = models.CharField(max_length=1024)
+
+ class Meta:
+ default_permissions = ()
+
+class Video(models.Model):
+ task = models.OneToOneField(Task, on_delete=models.CASCADE)
+ path = models.CharField(max_length=1024)
+ start_frame = models.PositiveIntegerField()
+ stop_frame = models.PositiveIntegerField()
+ step = models.PositiveIntegerField(default=1)
+ width = models.PositiveIntegerField()
+ height = models.PositiveIntegerField()
+
+ class Meta:
+ default_permissions = ()
+
+class Image(models.Model):
+ task = models.ForeignKey(Task, on_delete=models.CASCADE)
+ path = models.CharField(max_length=1024)
+ frame = models.PositiveIntegerField()
+ width = models.PositiveIntegerField()
+ height = models.PositiveIntegerField()
+
+ class Meta:
+ default_permissions = ()
+
class Segment(models.Model):
task = models.ForeignKey(Task, on_delete=models.CASCADE)
start_frame = models.IntegerField()
@@ -96,8 +158,8 @@ class Meta:
class Job(models.Model):
segment = models.ForeignKey(Segment, on_delete=models.CASCADE)
assignee = models.ForeignKey(User, null=True, blank=True, on_delete=models.SET_NULL)
- status = models.CharField(max_length=32, default=StatusChoice.ANNOTATION)
- max_shape_id = models.BigIntegerField(default=-1)
+ status = models.CharField(max_length=32, choices=StatusChoice.choices(),
+ default=StatusChoice.ANNOTATION)
class Meta:
default_permissions = ()
@@ -111,53 +173,37 @@ def __str__(self):
class Meta:
default_permissions = ()
+ unique_together = ('task', 'name')
+class AttributeType(str, Enum):
+ CHECKBOX = 'checkbox'
+ RADIO = 'radio'
+ NUMBER = 'number'
+ TEXT = 'text'
+ SELECT = 'select'
-def parse_attribute(text):
- match = re.match(r'^([~@])(\w+)=(\w+):(.+)?$', text)
- prefix = match.group(1)
- type = match.group(2)
- name = match.group(3)
- if match.group(4):
- values = list(csv.reader(StringIO(match.group(4)), quotechar="'"))[0]
- else:
- values = []
+ @classmethod
+ def choices(self):
+ return tuple((x.value, x.name) for x in self)
- return {'prefix':prefix, 'type':type, 'name':name, 'values':values}
+ def __str__(self):
+ return self.value
class AttributeSpec(models.Model):
label = models.ForeignKey(Label, on_delete=models.CASCADE)
- text = models.CharField(max_length=1024)
+ name = models.CharField(max_length=64)
+ mutable = models.BooleanField()
+ input_type = models.CharField(max_length=16,
+ choices=AttributeType.choices())
+ default_value = models.CharField(max_length=128)
+ values = models.CharField(max_length=4096)
class Meta:
default_permissions = ()
-
- def get_attribute(self):
- return parse_attribute(self.text)
-
- def is_mutable(self):
- attr = self.get_attribute()
- return attr['prefix'] == '~'
-
- def get_type(self):
- attr = self.get_attribute()
- return attr['type']
-
- def get_name(self):
- attr = self.get_attribute()
- return attr['name']
-
- def get_default_value(self):
- attr = self.get_attribute()
- return attr['values'][0]
-
- def get_values(self):
- attr = self.get_attribute()
- return attr['values']
+ unique_together = ('label', 'name')
def __str__(self):
- return self.get_attribute()['name']
-
+ return self.name
class AttributeVal(models.Model):
# TODO: add a validator here to be sure that it corresponds to self.label
@@ -169,103 +215,112 @@ class Meta:
abstract = True
default_permissions = ()
+class ShapeType(str, Enum):
+ RECTANGLE = 'rectangle' # (x0, y0, x1, y1)
+ POLYGON = 'polygon' # (x0, y0, ..., xn, yn)
+ POLYLINE = 'polyline' # (x0, y0, ..., xn, yn)
+ POINTS = 'points' # (x0, y0, ..., xn, yn)
+
+ @classmethod
+ def choices(self):
+ return tuple((x.value, x.name) for x in self)
+
+ def __str__(self):
+ return self.value
class Annotation(models.Model):
- job = models.ForeignKey(Job, on_delete=models.CASCADE)
+ id = models.BigAutoField(primary_key=True)
+ job = models.ForeignKey(Job, on_delete=models.CASCADE)
label = models.ForeignKey(Label, on_delete=models.CASCADE)
frame = models.PositiveIntegerField()
- group_id = models.PositiveIntegerField(default=0)
- client_id = models.BigIntegerField(default=-1)
-
- class Meta:
- abstract = True
-
-class Shape(models.Model):
- occluded = models.BooleanField(default=False)
- z_order = models.IntegerField(default=0)
+ group = models.PositiveIntegerField(null=True)
class Meta:
abstract = True
default_permissions = ()
-class BoundingBox(Shape):
+class Commit(models.Model):
id = models.BigAutoField(primary_key=True)
- xtl = models.FloatField()
- ytl = models.FloatField()
- xbr = models.FloatField()
- ybr = models.FloatField()
+ author = models.ForeignKey(User, null=True, blank=True, on_delete=models.SET_NULL)
+ version = models.PositiveIntegerField(default=0)
+ timestamp = models.DateTimeField(auto_now=True)
+ message = models.CharField(max_length=4096, default="")
class Meta:
abstract = True
default_permissions = ()
-class PolyShape(Shape):
- id = models.BigAutoField(primary_key=True)
- points = models.TextField()
+class JobCommit(Commit):
+ job = models.ForeignKey(Job, on_delete=models.CASCADE, related_name="commits")
+
+class FloatArrayField(models.TextField):
+ separator = ","
+
+ def from_db_value(self, value, expression, connection):
+ if value is None:
+ return value
+ return [float(v) for v in value.split(self.separator)]
+
+ def to_python(self, value):
+ if isinstance(value, list):
+ return value
+
+ return self.from_db_value(value, None, None)
+
+ def get_prep_value(self, value):
+ return self.separator.join(map(str, value))
+
+class Shape(models.Model):
+ type = models.CharField(max_length=16, choices=ShapeType.choices())
+ occluded = models.BooleanField(default=False)
+ z_order = models.IntegerField(default=0)
+ points = FloatArrayField()
class Meta:
abstract = True
default_permissions = ()
-class LabeledBox(Annotation, BoundingBox):
+class LabeledImage(Annotation):
pass
-class LabeledBoxAttributeVal(AttributeVal):
- box = models.ForeignKey(LabeledBox, on_delete=models.CASCADE)
+class LabeledImageAttributeVal(AttributeVal):
+ image = models.ForeignKey(LabeledImage, on_delete=models.CASCADE)
-class LabeledPolygon(Annotation, PolyShape):
+class LabeledShape(Annotation, Shape):
pass
-class LabeledPolygonAttributeVal(AttributeVal):
- polygon = models.ForeignKey(LabeledPolygon, on_delete=models.CASCADE)
+class LabeledShapeAttributeVal(AttributeVal):
+ shape = models.ForeignKey(LabeledShape, on_delete=models.CASCADE)
-class LabeledPolyline(Annotation, PolyShape):
+class LabeledTrack(Annotation):
pass
-class LabeledPolylineAttributeVal(AttributeVal):
- polyline = models.ForeignKey(LabeledPolyline, on_delete=models.CASCADE)
-
-class LabeledPoints(Annotation, PolyShape):
- pass
+class LabeledTrackAttributeVal(AttributeVal):
+ track = models.ForeignKey(LabeledTrack, on_delete=models.CASCADE)
-class LabeledPointsAttributeVal(AttributeVal):
- points = models.ForeignKey(LabeledPoints, on_delete=models.CASCADE)
-
-class ObjectPath(Annotation):
+class TrackedShape(Shape):
id = models.BigAutoField(primary_key=True)
- shapes = models.CharField(max_length=10, default='boxes')
-
-class ObjectPathAttributeVal(AttributeVal):
- track = models.ForeignKey(ObjectPath, on_delete=models.CASCADE)
-
-class TrackedObject(models.Model):
- track = models.ForeignKey(ObjectPath, on_delete=models.CASCADE)
+ track = models.ForeignKey(LabeledTrack, on_delete=models.CASCADE)
frame = models.PositiveIntegerField()
outside = models.BooleanField(default=False)
- class Meta:
- abstract = True
- default_permissions = ()
-class TrackedBox(TrackedObject, BoundingBox):
- pass
+class TrackedShapeAttributeVal(AttributeVal):
+ shape = models.ForeignKey(TrackedShape, on_delete=models.CASCADE)
-class TrackedBoxAttributeVal(AttributeVal):
- box = models.ForeignKey(TrackedBox, on_delete=models.CASCADE)
-class TrackedPolygon(TrackedObject, PolyShape):
- pass
-
-class TrackedPolygonAttributeVal(AttributeVal):
- polygon = models.ForeignKey(TrackedPolygon, on_delete=models.CASCADE)
+class Plugin(models.Model):
+ name = models.SlugField(max_length=32, primary_key=True)
+ description = SafeCharField(max_length=8192)
+ maintainer = models.ForeignKey(User, null=True, blank=True,
+ on_delete=models.SET_NULL, related_name="maintainers")
+ created_at = models.DateTimeField(auto_now_add=True)
+ updated_at = models.DateTimeField(auto_now_add=True)
-class TrackedPolyline(TrackedObject, PolyShape):
- pass
-
-class TrackedPolylineAttributeVal(AttributeVal):
- polyline = models.ForeignKey(TrackedPolyline, on_delete=models.CASCADE)
-
-class TrackedPoints(TrackedObject, PolyShape):
- pass
+ # Extend default permission model
+ class Meta:
+ default_permissions = ()
-class TrackedPointsAttributeVal(AttributeVal):
- points = models.ForeignKey(TrackedPoints, on_delete=models.CASCADE)
+class PluginOption(models.Model):
+ plugin = models.ForeignKey(Plugin, on_delete=models.CASCADE)
+ name = SafeCharField(max_length=32)
+ value = SafeCharField(max_length=1024)
diff --git a/cvat/apps/engine/serializers.py b/cvat/apps/engine/serializers.py
new file mode 100644
index 000000000000..7acc365b4765
--- /dev/null
+++ b/cvat/apps/engine/serializers.py
@@ -0,0 +1,366 @@
+# Copyright (C) 2019 Intel Corporation
+#
+# SPDX-License-Identifier: MIT
+
+import os
+import shutil
+
+from rest_framework import serializers
+from django.contrib.auth.models import User, Group
+
+from cvat.apps.engine import models
+from cvat.apps.engine.log import slogger
+
+
+class AttributeSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = models.AttributeSpec
+ fields = ('id', 'name', 'mutable', 'input_type', 'default_value',
+ 'values')
+
+ # pylint: disable=no-self-use
+ def to_internal_value(self, data):
+ attribute = data.copy()
+ attribute['values'] = '\n'.join(data.get('values', []))
+ return attribute
+
+ def to_representation(self, instance):
+ attribute = super().to_representation(instance)
+ attribute['values'] = attribute['values'].split('\n')
+ return attribute
+
+class LabelSerializer(serializers.ModelSerializer):
+ attributes = AttributeSerializer(many=True, source='attributespec_set',
+ default=[])
+ class Meta:
+ model = models.Label
+ fields = ('id', 'name', 'attributes')
+
+class JobCommitSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = models.JobCommit
+ fields = ('id', 'version', 'author', 'message', 'timestamp')
+
+class JobSerializer(serializers.ModelSerializer):
+ task_id = serializers.ReadOnlyField(source="segment.task.id")
+ start_frame = serializers.ReadOnlyField(source="segment.start_frame")
+ stop_frame = serializers.ReadOnlyField(source="segment.stop_frame")
+
+ class Meta:
+ model = models.Job
+ fields = ('url', 'id', 'assignee', 'status', 'start_frame',
+ 'stop_frame', 'task_id')
+
+class SimpleJobSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = models.Job
+ fields = ('url', 'id', 'assignee', 'status')
+
+class SegmentSerializer(serializers.ModelSerializer):
+ jobs = SimpleJobSerializer(many=True, source='job_set')
+
+ class Meta:
+ model = models.Segment
+ fields = ('start_frame', 'stop_frame', 'jobs')
+
+class ClientFileSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = models.ClientFile
+ fields = ('file', )
+
+ # pylint: disable=no-self-use
+ def to_internal_value(self, data):
+ return {'file': data}
+
+ # pylint: disable=no-self-use
+ def to_representation(self, instance):
+ upload_dir = instance.task.get_upload_dirname()
+ return instance.file.path[len(upload_dir) + 1:]
+
+class ServerFileSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = models.ServerFile
+ fields = ('file', )
+
+ # pylint: disable=no-self-use
+ def to_internal_value(self, data):
+ return {'file': data}
+
+ # pylint: disable=no-self-use
+ def to_representation(self, instance):
+ return instance.file
+
+class RemoteFileSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = models.RemoteFile
+ fields = ('file', )
+
+class RqStatusSerializer(serializers.Serializer):
+ state = serializers.ChoiceField(choices=[
+ "Queued", "Started", "Finished", "Failed"])
+ message = serializers.CharField(allow_blank=True, default="")
+
+class TaskDataSerializer(serializers.ModelSerializer):
+ client_files = ClientFileSerializer(many=True, source='clientfile_set',
+ default=[])
+ server_files = ServerFileSerializer(many=True, source='serverfile_set',
+ default=[])
+ remote_files = RemoteFileSerializer(many=True, source='remotefile_set',
+ default=[])
+
+ class Meta:
+ model = models.Task
+ fields = ('client_files', 'server_files', 'remote_files')
+
+ # pylint: disable=no-self-use
+ def update(self, instance, validated_data):
+ client_files = validated_data.pop('clientfile_set')
+ server_files = validated_data.pop('serverfile_set')
+ remote_files = validated_data.pop('remotefile_set')
+
+ for file in client_files:
+ client_file = models.ClientFile(task=instance, **file)
+ client_file.save()
+
+ for file in server_files:
+ server_file = models.ServerFile(task=instance, **file)
+ server_file.save()
+
+ for file in remote_files:
+ remote_file = models.RemoteFile(task=instance, **file)
+ remote_file.save()
+
+ return instance
+
+class WriteOnceMixin:
+ """Adds support for write once fields to serializers.
+
+ To use it, specify a list of fields as `write_once_fields` on the
+ serializer's Meta:
+ ```
+ class Meta:
+ model = SomeModel
+ fields = '__all__'
+ write_once_fields = ('collection', )
+ ```
+
+ Now the fields in `write_once_fields` can be set during POST (create),
+ but cannot be changed afterwards via PUT or PATCH (update).
+ Inspired by http://stackoverflow.com/a/37487134/627411.
+ """
+
+ def get_extra_kwargs(self):
+ extra_kwargs = super().get_extra_kwargs()
+
+ # We're only interested in PATCH/PUT.
+ if 'update' in getattr(self.context.get('view'), 'action', ''):
+ return self._set_write_once_fields(extra_kwargs)
+
+ return extra_kwargs
+
+ def _set_write_once_fields(self, extra_kwargs):
+ """Set all fields in `Meta.write_once_fields` to read_only."""
+ write_once_fields = getattr(self.Meta, 'write_once_fields', None)
+ if not write_once_fields:
+ return extra_kwargs
+
+ if not isinstance(write_once_fields, (list, tuple)):
+ raise TypeError(
+ 'The `write_once_fields` option must be a list or tuple. '
+ 'Got {}.'.format(type(write_once_fields).__name__)
+ )
+
+ for field_name in write_once_fields:
+ kwargs = extra_kwargs.get(field_name, {})
+ kwargs['read_only'] = True
+ extra_kwargs[field_name] = kwargs
+
+ return extra_kwargs
+
+class TaskSerializer(WriteOnceMixin, serializers.ModelSerializer):
+ labels = LabelSerializer(many=True, source='label_set', partial=True)
+ segments = SegmentSerializer(many=True, source='segment_set', read_only=True)
+ image_quality = serializers.IntegerField(min_value=0, max_value=100)
+
+ class Meta:
+ model = models.Task
+ fields = ('url', 'id', 'name', 'size', 'mode', 'owner', 'assignee',
+ 'bug_tracker', 'created_date', 'updated_date', 'overlap',
+ 'segment_size', 'z_order', 'flipped', 'status', 'labels', 'segments',
+ 'image_quality')
+ read_only_fields = ('size', 'mode', 'created_date', 'updated_date',
+ 'status')
+ write_once_fields = ('overlap', 'segment_size', 'image_quality')
+ ordering = ['-id']
+
+ # pylint: disable=no-self-use
+ def create(self, validated_data):
+ labels = validated_data.pop('label_set')
+ db_task = models.Task.objects.create(size=0, **validated_data)
+ for label in labels:
+ attributes = label.pop('attributespec_set')
+ db_label = models.Label.objects.create(task=db_task, **label)
+ for attr in attributes:
+ models.AttributeSpec.objects.create(label=db_label, **attr)
+
+ task_path = db_task.get_task_dirname()
+ if os.path.isdir(task_path):
+ shutil.rmtree(task_path)
+
+ upload_dir = db_task.get_upload_dirname()
+ os.makedirs(upload_dir)
+ output_dir = db_task.get_data_dirname()
+ os.makedirs(output_dir)
+
+ return db_task
+
+ # pylint: disable=no-self-use
+ def update(self, instance, validated_data):
+ instance.name = validated_data.get('name', instance.name)
+ instance.owner = validated_data.get('owner', instance.owner)
+ instance.assignee = validated_data.get('assignee', instance.assignee)
+ instance.bug_tracker = validated_data.get('bug_tracker',
+ instance.bug_tracker)
+ instance.z_order = validated_data.get('z_order', instance.z_order)
+ instance.flipped = validated_data.get('flipped', instance.flipped)
+ instance.image_quality = validated_data.get('image_quality',
+ instance.image_quality)
+ labels = validated_data.get('label_set', [])
+ for label in labels:
+ attributes = label.pop('attributespec_set', [])
+ (db_label, created) = models.Label.objects.get_or_create(task=instance,
+ name=label['name'])
+ if created:
+ slogger.task[instance.id].info("New {} label was created"
+ .format(db_label.name))
+ else:
+ slogger.task[instance.id].info("{} label was updated"
+ .format(db_label.name))
+ for attr in attributes:
+ (db_attr, created) = models.AttributeSpec.objects.get_or_create(
+ label=db_label, name=attr['name'], defaults=attr)
+ if created:
+ slogger.task[instance.id].info("New {} attribute for {} label was created"
+ .format(db_attr.name, db_label.name))
+ else:
+ slogger.task[instance.id].info("{} attribute for {} label was updated"
+ .format(db_attr.name, db_label.name))
+
+ # FIXME: need to update only "safe" fields
+ db_attr.default_value = attr.get('default_value', db_attr.default_value)
+ db_attr.mutable = attr.get('mutable', db_attr.mutable)
+ db_attr.input_type = attr.get('input_type', db_attr.input_type)
+ db_attr.values = attr.get('values', db_attr.values)
+ db_attr.save()
+
+ return instance
+
+class UserSerializer(serializers.ModelSerializer):
+ groups = serializers.SlugRelatedField(many=True,
+ slug_field='name', queryset=Group.objects.all())
+
+ class Meta:
+ model = User
+ fields = ('url', 'id', 'username', 'first_name', 'last_name', 'email',
+ 'groups', 'is_staff', 'is_superuser', 'is_active', 'last_login',
+ 'date_joined', 'groups')
+ read_only_fields = ('last_login', 'date_joined')
+ write_only_fields = ('password', )
+ ordering = ['-id']
+
+class ExceptionSerializer(serializers.Serializer):
+ system = serializers.CharField(max_length=255)
+ client = serializers.CharField(max_length=255)
+ time = serializers.DateTimeField()
+
+ job_id = serializers.IntegerField(required=False)
+ task_id = serializers.IntegerField(required=False)
+ proj_id = serializers.IntegerField(required=False)
+ client_id = serializers.IntegerField()
+
+ message = serializers.CharField(max_length=4096)
+ filename = serializers.URLField()
+ line = serializers.IntegerField()
+ column = serializers.IntegerField()
+ stack = serializers.CharField(max_length=8192,
+ style={'base_template': 'textarea.html'}, allow_blank=True)
+
+class AboutSerializer(serializers.Serializer):
+ name = serializers.CharField(max_length=128)
+ description = serializers.CharField(max_length=2048)
+ version = serializers.CharField(max_length=64)
+
+class ImageMetaSerializer(serializers.Serializer):
+ width = serializers.IntegerField()
+ height = serializers.IntegerField()
+
+class AttributeValSerializer(serializers.Serializer):
+ spec_id = serializers.IntegerField()
+ value = serializers.CharField(max_length=64, allow_blank=True)
+
+ def to_internal_value(self, data):
+ data['value'] = str(data['value'])
+ return super().to_internal_value(data)
+
+class AnnotationSerializer(serializers.Serializer):
+ id = serializers.IntegerField(default=None, allow_null=True)
+ frame = serializers.IntegerField(min_value=0)
+ label_id = serializers.IntegerField(min_value=0)
+ group = serializers.IntegerField(min_value=0, allow_null=True)
+
+class LabeledImageSerializer(AnnotationSerializer):
+ attributes = AttributeValSerializer(many=True,
+ source="labeledimageattributeval_set")
+
+class ShapeSerializer(serializers.Serializer):
+ type = serializers.ChoiceField(choices=models.ShapeType.choices())
+ occluded = serializers.BooleanField()
+ z_order = serializers.IntegerField(default=0)
+ points = serializers.ListField(
+ child=serializers.FloatField(min_value=0)
+ )
+
+class LabeledShapeSerializer(ShapeSerializer, AnnotationSerializer):
+ attributes = AttributeValSerializer(many=True,
+ source="labeledshapeattributeval_set")
+
+class TrackedShapeSerializer(ShapeSerializer):
+ id = serializers.IntegerField(default=None, allow_null=True)
+ frame = serializers.IntegerField(min_value=0)
+ outside = serializers.BooleanField()
+ attributes = AttributeValSerializer(many=True,
+ source="trackedshapeattributeval_set")
+
+class LabeledTrackSerializer(AnnotationSerializer):
+ shapes = TrackedShapeSerializer(many=True, allow_empty=False,
+ source="trackedshape_set")
+ attributes = AttributeValSerializer(many=True,
+ source="labeledtrackattributeval_set")
+
+class LabeledDataSerializer(serializers.Serializer):
+ version = serializers.IntegerField()
+ tags = LabeledImageSerializer(many=True)
+ shapes = LabeledShapeSerializer(many=True)
+ tracks = LabeledTrackSerializer(many=True)
+
+class FileInfoSerializer(serializers.Serializer):
+ name = serializers.CharField(max_length=1024)
+ type = serializers.ChoiceField(choices=["REG", "DIR"])
+
+class PluginSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = models.Plugin
+ fields = ('name', 'description', 'maintainer', 'created_at',
+ 'updated_at')
+
+class LogEventSerializer(serializers.Serializer):
+ job_id = serializers.IntegerField(required=False)
+ task_id = serializers.IntegerField(required=False)
+ proj_id = serializers.IntegerField(required=False)
+ client_id = serializers.IntegerField()
+
+ name = serializers.CharField(max_length=64)
+ time = serializers.DateTimeField()
+ message = serializers.CharField(max_length=4096, required=False)
+ payload = serializers.DictField(required=False)
+ is_active = serializers.BooleanField()
diff --git a/cvat/apps/engine/signals.py b/cvat/apps/engine/signals.py
new file mode 100644
index 000000000000..f39a96d8672a
--- /dev/null
+++ b/cvat/apps/engine/signals.py
@@ -0,0 +1,19 @@
+# Copyright (C) 2019 Intel Corporation
+#
+# SPDX-License-Identifier: MIT
+
+from .models import Job, StatusChoice
+
+def update_task_status(instance, **kwargs):
+ db_task = instance.segment.task
+ db_jobs = list(Job.objects.filter(segment__task_id=db_task.id))
+ status = StatusChoice.COMPLETED
+ if list(filter(lambda x: x.status == StatusChoice.ANNOTATION, db_jobs)):
+ status = StatusChoice.ANNOTATION
+ elif list(filter(lambda x: x.status == StatusChoice.VALIDATION, db_jobs)):
+ status = StatusChoice.VALIDATION
+
+ if status != db_task.status:
+ db_task.status = status
+ db_task.save()
+
diff --git a/cvat/apps/engine/static/engine/js/3rdparty/defiant.js b/cvat/apps/engine/static/engine/js/3rdparty/defiant.js
deleted file mode 100644
index 743f1bea88d7..000000000000
--- a/cvat/apps/engine/static/engine/js/3rdparty/defiant.js
+++ /dev/null
@@ -1,882 +0,0 @@
-/*
- * defiant.js.js [v1.4.5]
- * http://www.defiantjs.com
- * Copyright (c) 2013-2017, Hakan Bilgin
-
-
-
-
- {% for segm in item.segment_set.all %}
- {% for job in segm.job_set.all %}
-
-
-{{base_url}}?id={{job.id}} | -
- Created objects are being saved..`); + } else if (state.status === 'saveUpdated') { + this._overlay.setMessage(`${this._overlay.getMessage()}
- Updated objects are being saved..`); + } else if (state.status === 'saveDeleted') { + this._overlay.setMessage(`${this._overlay.getMessage()}
- Deleted objects are being saved..`); + } else if (state.status === 'saveUnlocked') { + this._saveButton.prop('disabled', false).text('Save Work'); + } else { + const message = `Unknown state has been reached during annotation saving: ${state.status} ` + + 'Please report the problem to support team immediately.'; + showMessage(message); + } + } +} + + +function buildAnnotationSaver(initialData, shapeCollection) { + const model = new AnnotationSaverModel(initialData, shapeCollection); + const controller = new AnnotationSaverController(model); + new AnnotationSaverView(model, controller); +} diff --git a/cvat/apps/engine/static/engine/js/annotationUI.js b/cvat/apps/engine/static/engine/js/annotationUI.js index 1c1471f225c0..1044fb25cd5b 100644 --- a/cvat/apps/engine/static/engine/js/annotationUI.js +++ b/cvat/apps/engine/static/engine/js/annotationUI.js @@ -18,7 +18,6 @@ HistoryController:false HistoryModel:false HistoryView:false - IncrementIdGenerator:false, Logger:false Mousetrap:false PlayerController:false @@ -28,8 +27,6 @@ PolyshapeEditorModel:false PolyshapeEditorView:false PolyShapeView:false - saveJobRequest:false - serverRequest:false ShapeBufferController:false ShapeBufferModel:false ShapeBufferView:false @@ -47,296 +44,128 @@ ShapeMergerView:false showMessage:false showOverlay:false + buildAnnotationSaver:false + LabelsInfo:false */ -"use strict"; - -function callAnnotationUI(jid) { - initLogger(jid); - let loadJobEvent = Logger.addContinuedEvent(Logger.EventType.loadJob); - serverRequest("/get/job/" + jid, function(job) { - serverRequest("get/annotation/job/" + jid, function(data) { - $('#loadingOverlay').remove(); - setTimeout(() => { - buildAnnotationUI(job, data, loadJobEvent); - }, 0); - }); - }); -} - -function initLogger(jobID) { - if (!Logger.initializeLogger('CVAT', jobID)) - { - let message = 'Could not initialize Logger. 
Please immediately report the problem to support team'; +async function initLogger(jobID) { + if (!Logger.initializeLogger(jobID)) { + const message = 'Logger has been already initialized'; console.error(message); showMessage(message); return; } Logger.setTimeThreshold(Logger.EventType.zoomImage); - - serverRequest('/get/username', function(response) { - Logger.setUsername(response.username); - }); } -function buildAnnotationUI(job, shapeData, loadJobEvent) { - // Setup some API - window.cvat = { - labelsInfo: new LabelsInfo(job), - translate: new CoordinateTranslator(), - player: { - geometry: { - scale: 1, - }, - frames: { - current: job.start, - start: job.start, - stop: job.stop, - } - }, - mode: null, - job: { - z_order: job.z_order, - id: job.jobid, - images: job.image_meta_data, - }, - search: { - value: window.location.search, - - set: function(name, value) { - let searchParams = new URLSearchParams(this.value); - - if (typeof value === 'undefined' || value === null) { - if (searchParams.has(name)) { - searchParams.delete(name); - } - } - else searchParams.set(name, value); - this.value = `${searchParams.toString()}`; - }, - - get: function(name) { - try { - let decodedURI = decodeURIComponent(this.value); - let urlSearchParams = new URLSearchParams(decodedURI); - if (urlSearchParams.has(name)) { - return urlSearchParams.get(name); - } - else return null; - } - catch (error) { - showMessage('Bad URL has been found'); - this.value = window.location.href; - return null; - } - }, - - toString: function() { - return `${window.location.origin}/?${this.value}`; - } - } - }; - - // Remove external search parameters from url - window.history.replaceState(null, null, `${window.location.origin}/?id=${job.jobid}`); - window.cvat.config = new Config(); +function blurAllElements() { + document.activeElement.blur(); +} - // Setup components - let idGenerator = new IncrementIdGenerator(job.max_shape_id + 1); - let annotationParser = new AnnotationParser(job, 
window.cvat.labelsInfo, idGenerator); - let shapeCollectionModel = new ShapeCollectionModel(idGenerator).import(shapeData, true); - let shapeCollectionController = new ShapeCollectionController(shapeCollectionModel); - let shapeCollectionView = new ShapeCollectionView(shapeCollectionModel, shapeCollectionController); +function uploadAnnotation(shapeCollectionModel, historyModel, + annotationParser, uploadAnnotationButton) { + $('#annotationFileSelector').one('change', (changedFileEvent) => { + const file = changedFileEvent.target.files['0']; + changedFileEvent.target.value = ''; + if (!file || file.type !== 'text/xml') return; + uploadAnnotationButton.text('Preparing..'); + uploadAnnotationButton.prop('disabled', true); + const overlay = showOverlay('File is being uploaded..'); - // In case of old tasks that dont provide max saved shape id properly - if (job.max_shape_id === -1) { - idGenerator.reset(shapeCollectionModel.maxId + 1); - } + const fileReader = new FileReader(); + fileReader.onload = (loadedFileEvent) => { + let data = null; - window.cvat.data = { - get: () => shapeCollectionModel.exportAll(), - set: (data) => { - for (let type in data) { - for (let shape of data[type]) { - shape.id = idGenerator.next(); + const asyncParse = () => { + try { + data = annotationParser.parse(loadedFileEvent.target.result); + } catch (err) { + overlay.remove(); + showMessage(err.message); + return; + } finally { + uploadAnnotationButton.text('Upload Annotation'); + uploadAnnotationButton.prop('disabled', false); } - } - shapeCollectionModel.import(data, false); - shapeCollectionModel.update(); - }, - clear: () => shapeCollectionModel.empty(), - }; - - let shapeBufferModel = new ShapeBufferModel(shapeCollectionModel); - let shapeBufferController = new ShapeBufferController(shapeBufferModel); - let shapeBufferView = new ShapeBufferView(shapeBufferModel, shapeBufferController); - - $('#shapeModeSelector').prop('value', job.mode); - let shapeCreatorModel = new 
ShapeCreatorModel(shapeCollectionModel, job); - let shapeCreatorController = new ShapeCreatorController(shapeCreatorModel); - let shapeCreatorView = new ShapeCreatorView(shapeCreatorModel, shapeCreatorController); - - let polyshapeEditorModel = new PolyshapeEditorModel(); - let polyshapeEditorController = new PolyshapeEditorController(polyshapeEditorModel); - let polyshapeEditorView = new PolyshapeEditorView(polyshapeEditorModel, polyshapeEditorController); - - // Add static member for class. It will be used by all polyshapes. - PolyShapeView.editor = polyshapeEditorModel; - - let shapeMergerModel = new ShapeMergerModel(shapeCollectionModel); - let shapeMergerController = new ShapeMergerController(shapeMergerModel); - new ShapeMergerView(shapeMergerModel, shapeMergerController); - - let shapeGrouperModel = new ShapeGrouperModel(shapeCollectionModel); - let shapeGrouperController = new ShapeGrouperController(shapeGrouperModel); - let shapeGrouperView = new ShapeGrouperView(shapeGrouperModel, shapeGrouperController); - - let aamModel = new AAMModel(shapeCollectionModel, (xtl, xbr, ytl, ybr) => { - playerModel.focus(xtl, xbr, ytl, ybr); - }, () => { - playerModel.fit(); - }); - let aamController = new AAMController(aamModel); - new AAMView(aamModel, aamController); - - shapeCreatorModel.subscribe(shapeCollectionModel); - shapeGrouperModel.subscribe(shapeCollectionView); - shapeCollectionModel.subscribe(shapeGrouperModel); - - $('#playerProgress').css('width', $('#player')["0"].clientWidth - 420); - - let playerGeometry = { - width: $('#playerFrame').width(), - height: $('#playerFrame').height(), - }; - - let playerModel = new PlayerModel(job, playerGeometry); - let playerController = new PlayerController(playerModel, - () => shapeCollectionModel.activeShape, - (direction) => shapeCollectionModel.find(direction), - Object.assign({}, playerGeometry, { - left: $('#playerFrame').offset().left, - top: $('#playerFrame').offset().top, - }), job); - new 
PlayerView(playerModel, playerController, job); - - let historyModel = new HistoryModel(playerModel, idGenerator); - let historyController = new HistoryController(historyModel); - new HistoryView(historyController, historyModel); - - playerModel.subscribe(shapeCollectionModel); - playerModel.subscribe(shapeCollectionView); - playerModel.subscribe(shapeCreatorView); - playerModel.subscribe(shapeBufferView); - playerModel.subscribe(shapeGrouperView); - playerModel.subscribe(polyshapeEditorView); - playerModel.shift(window.cvat.search.get('frame') || 0, true); - - let shortkeys = window.cvat.config.shortkeys; - - setupHelpWindow(shortkeys); - setupSettingsWindow(); - setupMenu(job, shapeCollectionModel, annotationParser, aamModel, playerModel, historyModel); - setupFrameFilters(); - setupShortkeys(shortkeys, { - aam: aamModel, - shapeCreator: shapeCreatorModel, - shapeMerger: shapeMergerModel, - shapeGrouper: shapeGrouperModel, - shapeBuffer: shapeBufferModel, - shapeEditor: polyshapeEditorModel - }); - - $(window).on('click', function(event) { - Logger.updateUserActivityTimer(); - if (event.target.classList.contains('modal') && !event.target.classList.contains('force-modal')) { - event.target.classList.add('hidden'); - } - }); - - let totalStat = shapeCollectionModel.collectStatistic()[1]; - loadJobEvent.addValues({ - 'track count': totalStat.boxes.annotation + totalStat.boxes.interpolation + - totalStat.polygons.annotation + totalStat.polygons.interpolation + - totalStat.polylines.annotation + totalStat.polylines.interpolation + - totalStat.points.annotation + totalStat.points.interpolation, - 'frame count': job.stop - job.start + 1, - 'object count': totalStat.total, - 'box count': totalStat.boxes.annotation + totalStat.boxes.interpolation, - 'polygon count': totalStat.polygons.annotation + totalStat.polygons.interpolation, - 'polyline count': totalStat.polylines.annotation + totalStat.polylines.interpolation, - 'points count': totalStat.points.annotation + 
totalStat.points.interpolation, - }); - loadJobEvent.close(); - - window.onbeforeunload = function(e) { - if (shapeCollectionModel.hasUnsavedChanges()) { - let message = "You have unsaved changes. Leave this page?"; - e.returnValue = message; - return message; - } - return; - }; - - $('#player').on('click', (e) => { - if (e.target.tagName.toLowerCase() != 'input') { - blurAllElements(); - } - }); -} + const asyncImport = () => { + try { + historyModel.empty(); + shapeCollectionModel.empty(); + shapeCollectionModel.import(data); + shapeCollectionModel.update(); + } finally { + overlay.remove(); + } + }; + overlay.setMessage('Data are being imported..'); + setTimeout(asyncImport); + }; -function copyToClipboard(text) { - let tempInput = $(""); - $("body").append(tempInput); - tempInput.prop('value', text).select(); - document.execCommand("copy"); - tempInput.remove(); + overlay.setMessage('File is being parsed..'); + setTimeout(asyncParse); + }; + fileReader.readAsText(file); + }).click(); } function setupFrameFilters() { - let brightnessRange = $('#playerBrightnessRange'); - let contrastRange = $('#playerContrastRange'); - let saturationRange = $('#playerSaturationRange'); - let frameBackground = $('#frameBackground'); - let reset = $('#resetPlayerFilterButton'); + const brightnessRange = $('#playerBrightnessRange'); + const contrastRange = $('#playerContrastRange'); + const saturationRange = $('#playerSaturationRange'); + const frameBackground = $('#frameBackground'); + const reset = $('#resetPlayerFilterButton'); let brightness = 100; let contrast = 100; let saturation = 100; - let shortkeys = window.cvat.config.shortkeys; + const { shortkeys } = window.cvat.config; + + function updateFilterParameters() { + frameBackground.css('filter', `contrast(${contrast}%) brightness(${brightness}%) saturate(${saturation}%)`); + } + brightnessRange.attr('title', ` - ${shortkeys['change_player_brightness'].view_value} - ${shortkeys['change_player_brightness'].description}`); + 
${shortkeys.change_player_brightness.view_value} - ${shortkeys.change_player_brightness.description}`); contrastRange.attr('title', ` - ${shortkeys['change_player_contrast'].view_value} - ${shortkeys['change_player_contrast'].description}`); + ${shortkeys.change_player_contrast.view_value} - ${shortkeys.change_player_contrast.description}`); saturationRange.attr('title', ` - ${shortkeys['change_player_saturation'].view_value} - ${shortkeys['change_player_saturation'].description}`); + ${shortkeys.change_player_saturation.view_value} - ${shortkeys.change_player_saturation.description}`); - let changeBrightnessHandler = Logger.shortkeyLogDecorator(function(e) { - if (e.shiftKey) brightnessRange.prop('value', brightness + 10).trigger('input'); - else brightnessRange.prop('value', brightness - 10).trigger('input'); + const changeBrightnessHandler = Logger.shortkeyLogDecorator((e) => { + if (e.shiftKey) { + brightnessRange.prop('value', brightness + 10).trigger('input'); + } else { + brightnessRange.prop('value', brightness - 10).trigger('input'); + } }); - let changeContrastHandler = Logger.shortkeyLogDecorator(function(e) { - if (e.shiftKey) contrastRange.prop('value', contrast + 10).trigger('input'); - else contrastRange.prop('value', contrast - 10).trigger('input'); + const changeContrastHandler = Logger.shortkeyLogDecorator((e) => { + if (e.shiftKey) { + contrastRange.prop('value', contrast + 10).trigger('input'); + } else { + contrastRange.prop('value', contrast - 10).trigger('input'); + } }); - let changeSaturationHandler = Logger.shortkeyLogDecorator(function(e) { - if (e.shiftKey) saturationRange.prop('value', saturation + 10).trigger('input'); - else saturationRange.prop('value', saturation - 10).trigger('input'); + const changeSaturationHandler = Logger.shortkeyLogDecorator((e) => { + if (e.shiftKey) { + saturationRange.prop('value', saturation + 10).trigger('input'); + } else { + saturationRange.prop('value', saturation - 10).trigger('input'); + } }); - 
Mousetrap.bind(shortkeys["change_player_brightness"].value, changeBrightnessHandler, 'keydown'); - Mousetrap.bind(shortkeys["change_player_contrast"].value, changeContrastHandler, 'keydown'); - Mousetrap.bind(shortkeys["change_player_saturation"].value, changeSaturationHandler, 'keydown'); + Mousetrap.bind(shortkeys.change_player_brightness.value, changeBrightnessHandler, 'keydown'); + Mousetrap.bind(shortkeys.change_player_contrast.value, changeContrastHandler, 'keydown'); + Mousetrap.bind(shortkeys.change_player_saturation.value, changeSaturationHandler, 'keydown'); - reset.on('click', function() { + reset.on('click', () => { brightness = 100; contrast = 100; saturation = 100; @@ -346,74 +175,61 @@ function setupFrameFilters() { updateFilterParameters(); }); - brightnessRange.on('input', function(e) { - let value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); - brightness = e.target.value = value; + brightnessRange.on('input', (e) => { + const value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); + e.target.value = value; + brightness = value; updateFilterParameters(); }); - contrastRange.on('input', function(e) { - let value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); - contrast = e.target.value = value; + contrastRange.on('input', (e) => { + const value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); + e.target.value = value; + contrast = value; updateFilterParameters(); }); - saturationRange.on('input', function(e) { - let value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); - saturation = e.target.value = value; + saturationRange.on('input', (e) => { + const value = Math.clamp(+e.target.value, +e.target.min, +e.target.max); + e.target.value = value; + saturation = value; updateFilterParameters(); }); - - function updateFilterParameters() { - frameBackground.css('filter', `contrast(${contrast}%) brightness(${brightness}%) saturate(${saturation}%)`); - } } function setupShortkeys(shortkeys, 
models) { - let annotationMenu = $('#annotationMenu'); - let settingsWindow = $('#settingsWindow'); - let helpWindow = $('#helpWindow'); + const annotationMenu = $('#annotationMenu'); + const settingsWindow = $('#settingsWindow'); + const helpWindow = $('#helpWindow'); - Mousetrap.prototype.stopCallback = function() { - return false; - }; + Mousetrap.prototype.stopCallback = () => false; - let openHelpHandler = Logger.shortkeyLogDecorator(function() { - let helpInvisible = helpWindow.hasClass('hidden'); + const openHelpHandler = Logger.shortkeyLogDecorator(() => { + const helpInvisible = helpWindow.hasClass('hidden'); if (helpInvisible) { annotationMenu.addClass('hidden'); settingsWindow.addClass('hidden'); helpWindow.removeClass('hidden'); - } - else { + } else { helpWindow.addClass('hidden'); } return false; }); - let openSettingsHandler = Logger.shortkeyLogDecorator(function() { - let settingsInvisible = settingsWindow.hasClass('hidden'); + const openSettingsHandler = Logger.shortkeyLogDecorator(() => { + const settingsInvisible = settingsWindow.hasClass('hidden'); if (settingsInvisible) { annotationMenu.addClass('hidden'); helpWindow.addClass('hidden'); settingsWindow.removeClass('hidden'); - } - else { + } else { $('#settingsWindow').addClass('hidden'); } return false; }); - let saveHandler = Logger.shortkeyLogDecorator(function() { - let saveButtonLocked = $('#saveButton').prop('disabled'); - if (!saveButtonLocked) { - $('#saveButton').click(); - } - return false; - }); - - let cancelModeHandler = Logger.shortkeyLogDecorator(function() { + const cancelModeHandler = Logger.shortkeyLogDecorator(() => { switch (window.cvat.mode) { case 'aam': models.aam.switchAAMMode(); @@ -433,83 +249,62 @@ function setupShortkeys(shortkeys, models) { case 'poly_editing': models.shapeEditor.finish(); break; + default: + break; } return false; }); - Mousetrap.bind(shortkeys["open_help"].value, openHelpHandler, 'keydown'); - Mousetrap.bind(shortkeys["open_settings"].value, 
openSettingsHandler, 'keydown'); - Mousetrap.bind(shortkeys["save_work"].value, saveHandler, 'keydown'); - Mousetrap.bind(shortkeys["cancel_mode"].value, cancelModeHandler, 'keydown'); + Mousetrap.bind(shortkeys.open_help.value, openHelpHandler, 'keydown'); + Mousetrap.bind(shortkeys.open_settings.value, openSettingsHandler, 'keydown'); + Mousetrap.bind(shortkeys.cancel_mode.value, cancelModeHandler, 'keydown'); } function setupHelpWindow(shortkeys) { - let closeHelpButton = $('#closeHelpButton'); - let helpTable = $('#shortkeyHelpTable'); + const closeHelpButton = $('#closeHelpButton'); + const helpTable = $('#shortkeyHelpTable'); - closeHelpButton.on('click', function() { + closeHelpButton.on('click', () => { $('#helpWindow').addClass('hidden'); }); - for (let key in shortkeys) { - helpTable.append($(`
|
- + | |
|
- + | |
|
@@ -165,6 +164,11 @@ document.addEventListener('DOMContentLoaded', () => {
$(`#${reidSubmitMergeId}`).on('click', () => {
$(`#${reidWindowId}`).addClass('hidden');
run($(`#${reidOverlay}`), $(`#${reidCancelButtonId}`),
- $(`#${reidThresholdValueId}`), $(`#${reidDistanceValueId}`));
+ $(`#${reidThresholdValueId}`), $(`#${reidDistanceValueId}`))
+ .catch((error) => {
+ setTimeout(() => {
+ throw error;
+ });
+ });
});
});
diff --git a/cvat/apps/tf_annotation/__init__.py b/cvat/apps/tf_annotation/__init__.py
index 2949bfd44427..7c9219c09832 100644
--- a/cvat/apps/tf_annotation/__init__.py
+++ b/cvat/apps/tf_annotation/__init__.py
@@ -5,5 +5,5 @@
from cvat.settings.base import JS_3RDPARTY
-JS_3RDPARTY['dashboard'] = JS_3RDPARTY.get('dashboard', []) + ['tf_annotation/js/tf_annotation.js']
+JS_3RDPARTY['dashboard'] = JS_3RDPARTY.get('dashboard', []) + ['tf_annotation/js/dashboardPlugin.js']
diff --git a/cvat/apps/tf_annotation/static/tf_annotation/js/dashboardPlugin.js b/cvat/apps/tf_annotation/static/tf_annotation/js/dashboardPlugin.js
new file mode 100644
index 000000000000..b0f319a349a6
--- /dev/null
+++ b/cvat/apps/tf_annotation/static/tf_annotation/js/dashboardPlugin.js
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2018 Intel Corporation
+ *
+ * SPDX-License-Identifier: MIT
+ */
+
+/* global
+ userConfirm:false
+ showMessage:false
+*/
+
+window.addEventListener('dashboardReady', () => {
+ function checkProcess(tid, button) {
+ function checkCallback() {
+ $.get(`/tensorflow/annotation/check/task/${tid}`).done((statusData) => {
+ if (['started', 'queued'].includes(statusData.status)) {
+ const progress = Math.round(statusData.progress) || '0';
+ button.text(`Cancel TF Annotation (${progress}%)`);
+ setTimeout(checkCallback, 5000);
+ } else {
+ button.text('Run TF Annotation');
+ button.removeClass('tfAnnotationProcess');
+ button.prop('disabled', false);
+
+ if (statusData.status === 'failed') {
+ const message = `Tensorflow annotation failed. Error: ${statusData.stderr}`;
+ showMessage(message);
+ } else if (statusData.status !== 'finished') {
+ const message = `Tensorflow annotation check request returned status "${statusData.status}"`;
+ showMessage(message);
+ }
+ }
+ }).fail((errorData) => {
+ const message = `Can not sent tensorflow annotation check request. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ showMessage(message);
+ });
+ }
+
+ setTimeout(checkCallback, 5000);
+ }
+
+
+ function runProcess(tid, button) {
+ $.get(`/tensorflow/annotation/create/task/${tid}`).done(() => {
+ showMessage('Process has started');
+ button.text('Cancel TF Annotation (0%)');
+ button.addClass('tfAnnotationProcess');
+ checkProcess(tid, button);
+ }).fail((errorData) => {
+ const message = `Can not run tf annotation. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ showMessage(message);
+ });
+ }
+
+
+ function cancelProcess(tid, button) {
+ $.get(`/tensorflow/annotation/cancel/task/${tid}`).done(() => {
+ button.prop('disabled', true);
+ }).fail((errorData) => {
+ const message = `Can not cancel tf annotation. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ showMessage(message);
+ });
+ }
+
+
+ function setupDashboardItem(item, metaData) {
+ const tid = +item.attr('tid');
+ const button = $('');
+
+ button.on('click', () => {
+ if (button.hasClass('tfAnnotationProcess')) {
+ userConfirm('The process will be canceled. Continue?', () => {
+ cancelProcess(tid, button);
+ });
+ } else {
+ userConfirm('The current annotation will be lost. Are you sure?', () => {
+ runProcess(tid, button);
+ });
+ }
+ });
+
+ button.addClass('dashboardTFAnnotationButton regular dashboardButtonUI');
+ button.appendTo(item.find('div.dashboardButtonsUI'));
+
+ if ((tid in metaData) && (metaData[tid].active)) {
+ button.text('Cancel TF Annotation');
+ button.addClass('tfAnnotationProcess');
+ checkProcess(tid, button);
+ }
+ }
+
+ const elements = $('.dashboardItem');
+ const tids = Array.from(elements, el => +el.getAttribute('tid'));
+
+ $.ajax({
+ type: 'POST',
+ url: '/tensorflow/annotation/meta/get',
+ data: JSON.stringify(tids),
+ contentType: 'application/json; charset=utf-8',
+ }).done((metaData) => {
+ elements.each(function setupDashboardItemWrapper() {
+ setupDashboardItem($(this), metaData);
+ });
+ }).fail((errorData) => {
+ const message = `Can not get tf annotation meta info. Code: ${errorData.status}. `
+ + `Message: ${errorData.responseText || errorData.statusText}`;
+ showMessage(message);
+ });
+});
diff --git a/cvat/apps/tf_annotation/static/tf_annotation/js/tf_annotation.js b/cvat/apps/tf_annotation/static/tf_annotation/js/tf_annotation.js
deleted file mode 100644
index 84ad318d3a4b..000000000000
--- a/cvat/apps/tf_annotation/static/tf_annotation/js/tf_annotation.js
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright (C) 2018 Intel Corporation
- *
- * SPDX-License-Identifier: MIT
- */
-
-/* global
- userConfirm:false
- showMessage:false
-*/
-
-"use strict";
-
-function CheckTFAnnotationRequest(taskId, tfAnnotationButton) {
- let frequence = 5000;
- let errorCount = 0;
- let interval = setInterval(function() {
- $.ajax ({
- url: '/tensorflow/annotation/check/task/' + taskId,
- success: function(jsonData) {
- let status = jsonData["status"];
- if (status == "started" || status == "queued") {
- let progress = Math.round(jsonData["progress"]) || "0";
- tfAnnotationButton.text(`Cancel TF Annotation (${progress}%)`);
- }
- else {
- tfAnnotationButton.text("Run TF Annotation");
- tfAnnotationButton.removeClass("tfAnnotationProcess");
- tfAnnotationButton.prop("disabled", false);
- clearInterval(interval);
- }
- },
- error: function() {
- errorCount ++;
- if (errorCount > 5) {
- clearInterval(interval);
- tfAnnotationButton.prop("disabled", false);
- tfAnnotationButton.text("Status Check Error");
- throw Error(`TF annotation check request error for task ${window.cvat.dashboard.taskID}:${window.cvat.dashboard.taskName}`);
- }
- }
- });
- }, frequence);
-}
-
-function RunTFAnnotationRequest() {
- let tfAnnotationButton = this;
- let taskID = window.cvat.dashboard.taskID;
- $.ajax ({
- url: '/tensorflow/annotation/create/task/' + taskID,
- success: function() {
- showMessage('Process started.');
- tfAnnotationButton.text(`Cancel TF Annotation (0%)`);
- tfAnnotationButton.addClass("tfAnnotationProcess");
- CheckTFAnnotationRequest(taskID, tfAnnotationButton);
- },
- error: function(response) {
- let message = 'Abort. Reason: ' + response.responseText;
- showMessage(message);
- }
- });
-}
-
-function CancelTFAnnotationRequest() {
- let tfAnnotationButton = this;
- $.ajax ({
- url: '/tensorflow/annotation/cancel/task/' + window.cvat.dashboard.taskID,
- success: function() {
- tfAnnotationButton.prop("disabled", true);
- },
- error: function(data) {
- let message = `TF annotation cancel error: ${data.responseText}`;
- showMessage(message);
- }
- });
-}
-
-function onTFAnnotationClick() {
- let button = this;
- let uiElem = button.closest('div.dashboardTaskUI');
- let taskId = +uiElem.attr('id').split('_')[1];
- let taskName = $.trim($( uiElem.find('label.dashboardTaskNameLabel')[0] ).text());
-
- window.cvat.dashboard.taskID = taskId;
- window.cvat.dashboard.taskName = taskName;
-
- if (button.hasClass("tfAnnotationProcess")) {
- userConfirm('The process will be canceled. Continue?', CancelTFAnnotationRequest.bind(button));
- }
- else {
- userConfirm('The current annotation will be lost. Are you sure?', RunTFAnnotationRequest.bind(button));
- }
-}
-
-window.cvat = window.cvat || {};
-window.cvat.dashboard = window.cvat.dashboard || {};
-window.cvat.dashboard.uiCallbacks = window.cvat.dashboard.uiCallbacks || [];
-
-window.cvat.dashboard.uiCallbacks.push(function(newElements) {
- let tids = [];
- for (let el of newElements) {
- tids.push(el.id.split('_')[1]);
- }
-
- $.ajax({
- type: 'POST',
- url: '/tensorflow/annotation/meta/get',
- data: JSON.stringify(tids),
- contentType: "application/json; charset=utf-8",
- success: (data) => {
- newElements.each(function(idx) {
- let elem = $(newElements[idx]);
- let tid = +elem.attr('id').split('_')[1];
- let buttonsUI = elem.find('div.dashboardButtonsUI')[0];
- let tfAnnotationButton = $('');
-
- tfAnnotationButton.on('click', onTFAnnotationClick.bind(tfAnnotationButton));
- tfAnnotationButton.addClass('dashboardTFAnnotationButton regular dashboardButtonUI');
- tfAnnotationButton.appendTo(buttonsUI);
-
- if ((tid in data) && (data[tid].active)) {
- tfAnnotationButton.text("Cancel TF Annotation");
- tfAnnotationButton.addClass("tfAnnotationProcess");
- CheckTFAnnotationRequest(tid, tfAnnotationButton);
- }
- });
- },
- error: (data) => {
- let message = `Can not get tf annotation meta info. Code: ${data.status}. Message: ${data.responseText || data.statusText}`;
- showMessage(message);
- throw Error(message);
- }
- });
-});
diff --git a/cvat/apps/tf_annotation/views.py b/cvat/apps/tf_annotation/views.py
index 729da1fc0da0..c98c56765e6f 100644
--- a/cvat/apps/tf_annotation/views.py
+++ b/cvat/apps/tf_annotation/views.py
@@ -10,7 +10,8 @@
from cvat.apps.authentication.decorators import login_required
from cvat.apps.engine.models import Task as TaskModel
from cvat.apps.engine import annotation, task
-
+from cvat.apps.engine.serializers import LabeledDataSerializer
+from cvat.apps.engine.annotation import put_task_data
import django_rq
import fnmatch
@@ -168,44 +169,30 @@ def get_image_key(item):
def convert_to_cvat_format(data):
- def create_anno_container():
- return {
- "boxes": [],
- "polygons": [],
- "polylines": [],
- "points": [],
- "box_paths": [],
- "polygon_paths": [],
- "polyline_paths": [],
- "points_paths": [],
- }
-
result = {
- 'create': create_anno_container(),
- 'update': create_anno_container(),
- 'delete': create_anno_container(),
+ "tracks": [],
+ "shapes": [],
+ "tags": [],
+ "version": 0,
}
for label in data:
boxes = data[label]
for box in boxes:
- result['create']['boxes'].append({
+ result['shapes'].append({
+ "type": "rectangle",
"label_id": label,
"frame": box[0],
- "xtl": box[1],
- "ytl": box[2],
- "xbr": box[3],
- "ybr": box[4],
+ "points": [box[1], box[2], box[3], box[4]],
"z_order": 0,
- "group_id": 0,
+ "group": None,
"occluded": False,
"attributes": [],
- "id": -1,
})
return result
-def create_thread(tid, labels_mapping):
+def create_thread(tid, labels_mapping, user):
try:
TRESHOLD = 0.5
# Init rq job
@@ -228,14 +215,16 @@ def create_thread(tid, labels_mapping):
# Modify data format and save
result = convert_to_cvat_format(result)
- annotation.clear_task(tid)
- annotation.save_task(tid, result)
+ serializer = LabeledDataSerializer(data = result)
+ if serializer.is_valid(raise_exception=True):
+ put_task_data(tid, user, result)
slogger.glob.info('tf annotation for task {} done'.format(tid))
- except:
+ except Exception as ex:
try:
slogger.task[tid].exception('exception was occured during tf annotation of the task', exc_info=True)
except:
slogger.glob.exception('exception was occured during tf annotation of the task {}'.format(tid), exc_into=True)
+ raise ex
@login_required
def get_meta_info(request):
@@ -301,7 +290,7 @@ def create(request, tid):
# Run tf annotation job
queue.enqueue_call(func=create_thread,
- args=(tid, labels_mapping),
+ args=(tid, labels_mapping, request.user),
job_id='tf_annotation.create/{}'.format(tid),
timeout=604800) # 7 days
@@ -338,6 +327,7 @@ def check(request, tid):
job.delete()
else:
data['status'] = 'failed'
+ data['stderr'] = job.exc_info
job.delete()
except Exception:
diff --git a/cvat/requirements/all.txt b/cvat/requirements/all.txt
new file mode 100644
index 000000000000..ec7a170b14a9
--- /dev/null
+++ b/cvat/requirements/all.txt
@@ -0,0 +1,4 @@
+-r development.txt
+-r production.txt
+-r staging.txt
+-r testing.txt
diff --git a/cvat/requirements/base.txt b/cvat/requirements/base.txt
index 5d1dee3d8ac2..e7e82c297662 100644
--- a/cvat/requirements/base.txt
+++ b/cvat/requirements/base.txt
@@ -1,14 +1,14 @@
click==6.7
-Django==2.1.5
+Django==2.1.7
django-appconf==1.0.2
django-auth-ldap==1.4.0
django-cacheops==4.0.6
django-compressor==2.2
-django-rq==1.3.0
+django-rq==2.0.0
EasyProcess==0.2.3
ffmpy==0.2.2
Pillow==5.1.0
-numpy==1.14.2
+numpy==1.16.2
patool==1.12
python-ldap==3.0.0
pytz==2018.3
@@ -17,8 +17,8 @@ rcssmin==1.0.6
redis==3.2.0
requests==2.20.0
rjsmin==1.0.12
-rq==0.13.0
-scipy==1.0.1
+rq==1.0.0
+scipy==1.2.1
sqlparse==0.2.4
django-sendfile==0.3.11
dj-pagination==2.4.0
@@ -26,3 +26,10 @@ python-logstash==0.4.6
django-revproxy==0.9.15
rules==2.0
GitPython==2.1.11
+coreapi==2.3.3
+django-filter==2.0.0
+Markdown==3.0.1
+djangorestframework==3.9.0
+Pygments==2.3.1
+drf-yasg==1.15.0
+Shapely==1.6.4.post2
diff --git a/cvat/requirements/testing.txt b/cvat/requirements/testing.txt
new file mode 100644
index 000000000000..cb208059a91e
--- /dev/null
+++ b/cvat/requirements/testing.txt
@@ -0,0 +1,2 @@
+-f development.txt
+fakeredis==1.0.3
\ No newline at end of file
diff --git a/cvat/settings/base.py b/cvat/settings/base.py
index 7ba1e668664d..26a66cfc5a4f 100644
--- a/cvat/settings/base.py
+++ b/cvat/settings/base.py
@@ -99,8 +99,31 @@ def generate_ssh_keys():
'dj_pagination',
'revproxy',
'rules',
+ 'rest_framework',
+ 'django_filters',
+ 'drf_yasg',
]
+REST_FRAMEWORK = {
+ 'DEFAULT_PERMISSION_CLASSES': [
+ 'rest_framework.permissions.IsAuthenticated',
+ ],
+ 'DEFAULT_VERSIONING_CLASS':
+ # Don't try to use URLPathVersioning. It will give you /api/{version}
+ # in path and '/api/docs' will not collapse similar items (flat list
+ # of all possible methods isn't readable).
+ 'rest_framework.versioning.NamespaceVersioning',
+ # Need to add 'api-docs' here as a workaround for include_docs_urls.
+ 'ALLOWED_VERSIONS': ('v1', 'api-docs'),
+ 'DEFAULT_PAGINATION_CLASS':
+ 'rest_framework.pagination.PageNumberPagination',
+ 'PAGE_SIZE': 10,
+ 'DEFAULT_FILTER_BACKENDS': (
+ 'rest_framework.filters.SearchFilter',
+ 'django_filters.rest_framework.DjangoFilterBackend',
+ 'rest_framework.filters.OrderingFilter')
+}
+
if 'yes' == os.environ.get('TF_ANNOTATION', 'no'):
INSTALLED_APPS += ['cvat.apps.tf_annotation']
diff --git a/cvat/settings/development.py b/cvat/settings/development.py
index 8c1b8ee818c7..9205c8d51d7c 100644
--- a/cvat/settings/development.py
+++ b/cvat/settings/development.py
@@ -11,6 +11,8 @@
'django_extensions',
]
+ALLOWED_HOSTS.append('testserver')
+
# Django-sendfile:
# https://github.com/johnsensible/django-sendfile
SENDFILE_BACKEND = 'sendfile.backends.development'
diff --git a/cvat/settings/staging.py b/cvat/settings/staging.py
index 46ddeb8c150a..bae862d9ce94 100644
--- a/cvat/settings/staging.py
+++ b/cvat/settings/staging.py
@@ -1,4 +1,3 @@
-
# Copyright (C) 2018 Intel Corporation
#
# SPDX-License-Identifier: MIT
@@ -30,7 +29,7 @@
SILKY_MAX_RECORDED_REQUESTS = 10**4
def SILKY_INTERCEPT_FUNC(request):
# Ignore all requests which try to get a frame (too many of them)
- if request.method == 'GET' and '/frame/' in request.path:
+ if request.method == 'GET' and '/frames/' in request.path:
return False
return True
diff --git a/cvat/settings/testing.py b/cvat/settings/testing.py
new file mode 100644
index 000000000000..4a0560f78578
--- /dev/null
+++ b/cvat/settings/testing.py
@@ -0,0 +1,51 @@
+# Copyright (C) 2018 Intel Corporation
+#
+# SPDX-License-Identifier: MIT
+
+from .development import *
+import tempfile
+
+_temp_dir = tempfile.TemporaryDirectory(suffix="cvat")
+
+DATA_ROOT = os.path.join(_temp_dir.name, 'data')
+os.makedirs(DATA_ROOT, exist_ok=True)
+SHARE_ROOT = os.path.join(_temp_dir.name, 'share')
+os.makedirs(SHARE_ROOT, exist_ok=True)
+
+# To avoid ERROR django.security.SuspiciousFileOperation:
+# The joined path (...) is located outside of the base path component
+MEDIA_ROOT = _temp_dir.name
+
+# Suppress all logs by default
+for logger in LOGGING["loggers"].values():
+ if isinstance(logger, dict) and "level" in logger:
+ logger["level"] = "ERROR"
+
+LOGGING["handlers"]["server_file"] = LOGGING["handlers"]["console"]
+
+PASSWORD_HASHERS = (
+ 'django.contrib.auth.hashers.MD5PasswordHasher',
+)
+
+# When you run ./manage.py test, Django looks at the TEST_RUNNER setting to
+# determine what to do. By default, TEST_RUNNER points to
+# 'django.test.runner.DiscoverRunner'. This class defines the default Django
+# testing behavior.
+TEST_RUNNER = "cvat.settings.testing.PatchedDiscoverRunner"
+
+from django.test.runner import DiscoverRunner
+class PatchedDiscoverRunner(DiscoverRunner):
+ def __init__(self, *args, **kwargs):
+ # Used fakeredis for testing (don't affect production redis)
+ from fakeredis import FakeRedis, FakeStrictRedis
+ import django_rq.queues
+ simple_redis = FakeRedis()
+ strict_redis = FakeStrictRedis()
+ django_rq.queues.get_redis_connection = lambda _, strict: strict_redis \
+ if strict else simple_redis
+
+ # Run all RQ requests syncroniously
+ for config in RQ_QUEUES.values():
+ config["ASYNC"] = False
+
+ super().__init__(*args, **kwargs)
\ No newline at end of file
diff --git a/cvat/simpleworker.py b/cvat/simpleworker.py
index 6f2e38624fee..15265b062902 100644
--- a/cvat/simpleworker.py
+++ b/cvat/simpleworker.py
@@ -25,4 +25,4 @@ def main_work_horse(self, *args, **kwargs):
def execute_job(self, *args, **kwargs):
"""Execute job in same thread/process, do not fork()"""
- return self.perform_job(*args, **kwargs)
\ No newline at end of file
+ return self.perform_job(*args, **kwargs)
diff --git a/cvat/utils/version.py b/cvat/utils/version.py
new file mode 100644
index 000000000000..b69c718b4111
--- /dev/null
+++ b/cvat/utils/version.py
@@ -0,0 +1,59 @@
+# Copyright (C) 2019 Intel Corporation
+#
+# SPDX-License-Identifier: MIT
+#
+# Note: This is a slightly re-implemented version of Django's code. We cannot
+# use get_version from the django.utils.version module because get_git_changeset
+# would always return an empty value (cwd=repo_dir isn't correct). It also
+# gives us a way to define the version as we like.
+
+import datetime
+import os
+import subprocess
+
+def get_version(version):
+ """Return a PEP 440-compliant version number from VERSION."""
+ # Now build the two parts of the version number:
+ # main = X.Y[.Z]
+ # sub = .devN - for pre-alpha releases
+ # | {a|b|rc}N - for alpha, beta, and rc releases
+
+ main = get_main_version(version)
+
+ sub = ''
+ if version[3] == 'alpha' and version[4] == 0:
+ git_changeset = get_git_changeset()
+ if git_changeset:
+ sub = '.dev%s' % git_changeset
+
+ elif version[3] != 'final':
+ mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}
+ sub = mapping[version[3]] + str(version[4])
+
+ return main + sub
+
+def get_main_version(version):
+ """Return main version (X.Y[.Z]) from VERSION."""
+ parts = 2 if version[2] == 0 else 3
+ return '.'.join(str(x) for x in version[:parts])
+
+def get_git_changeset():
+ """Return a numeric identifier of the latest git changeset.
+
+ The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
+ This value isn't guaranteed to be unique, but collisions are very unlikely,
+ so it's sufficient for generating the development version numbers.
+ """
+ repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ git_log = subprocess.Popen(
+ 'git log --pretty=format:%ct --quiet -1 HEAD',
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ shell=True, cwd=repo_dir, universal_newlines=True,
+ )
+ timestamp = git_log.communicate()[0]
+ try:
+ timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
+ except ValueError:
+ return None
+ return timestamp.strftime('%Y%m%d%H%M%S')
+
diff --git a/tests/karma.conf.js b/tests/karma.conf.js
index 1d31075604a8..8e28f6566b0d 100644
--- a/tests/karma.conf.js
+++ b/tests/karma.conf.js
@@ -10,7 +10,6 @@ module.exports = function(config) {
basePath: path.join(process.env.HOME, 'cvat/apps/'),
frameworks: ['qunit'],
files: [
- 'engine/static/engine/js/idGenerator.js',
'engine/static/engine/js/labelsInfo.js',
'engine/static/engine/js/annotationParser.js',
'engine/static/engine/js/listener.js',
@@ -40,7 +39,7 @@ module.exports = function(config) {
coverageReporter: {
dir: path.join(process.env.HOME, 'media/coverage'),
reporters: [
- { type: 'html', subdir: '.' }, { type: 'lcov', subdir: '.' }
+ { type: 'html', subdir: '.' }, { type: 'lcov', subdir: '.' }
],
instrumenterOptions: {
istanbul: { noCompact: true }