From 5dbd3b0f77608d2ad5a266dcc1ae395a11671656 Mon Sep 17 00:00:00 2001 From: line0 Date: Thu, 14 Jan 2016 02:39:02 +0100 Subject: [PATCH] consistently use LF EOL --- modules/DependencyControl/ConfigHandler.moon | 658 +++---- modules/DependencyControl/FileOps.moon | 604 +++---- modules/DependencyControl/UnitTestSuite.moon | 1690 +++++++++--------- modules/DependencyControl/UpdateFeed.moon | 474 ++--- modules/DependencyControl/Updater.moon | 1026 +++++------ 5 files changed, 2226 insertions(+), 2226 deletions(-) diff --git a/modules/DependencyControl/ConfigHandler.moon b/modules/DependencyControl/ConfigHandler.moon index b0cac7c..a791390 100644 --- a/modules/DependencyControl/ConfigHandler.moon +++ b/modules/DependencyControl/ConfigHandler.moon @@ -1,330 +1,330 @@ -lfs = require "lfs" -util = require "aegisub.util" -json = require "json" -PreciseTimer = require "PT.PreciseTimer" -Logger = require "l0.DependencyControl.Logger" -fileOps = require "l0.DependencyControl.FileOps" -mutex = require "BM.BadMutex" - -class ConfigHandler - @handlers = {} - errors = { - jsonDecode: "JSON parse error: %s" - configCorrupted: [[An error occured while parsing the JSON config file. -A backup of the corrupted configuration has been written to '%s'. -Reload your automation scripts to generate a new configuration file.]] - badKey: "Can't %s section because the key #%d (%s) leads to a %s." - jsonRoot: "JSON root element must be an array or a hashtable, got a %s." - noFile: "No config file defined." - failedLock: "Failed to lock config file for %s: %s" - waitLockFailed: "Error waiting for existing lock to be released: %s" - forceReleaseFailed: "Failed to force-release existing lock after timeout had passed (%s)" - noLock: "#{@@__name} doesn't have a lock" - writeFailedRead: "Failed reading config file: %s." - lockTimeout: "Timeout reached while waiting for write lock." - } - traceMsgs = { - -- waitingLockPre: "Waiting %d ms before trying to get a lock..." - waitingLock: "Waiting for config file lock to be released (%d ms passed)... " - waitingLockFinished: "Lock was released after %d ms." - mergeSectionStart: "Merging own section into configuration. Own Section: %s\nConfiguration: %s" - mergeSectionResult: "Merge completed with result: %s" - fileNotFound: "Couldn't find config file '%s'." - fileCreate: "Config file '%s' doesn't exist, yet. Will write a fresh copy containing the current configuration section." - writing: "Writing config file '%s'..." - -- waitingLockTimeout: "Timeout was reached after %d seconds, force-releasing lock..." 
- } - - new: (@file, defaults, @section, noLoad, @logger = Logger fileBaseName: @@__name) => - @section = {@section} if "table" != type @section - @defaults = defaults and util.deep_copy(defaults) or {} - -- register all handlers for concerted writing - @setFile @file - - -- set up user configuration and make defaults accessible - @userConfig = {} - @config = setmetatable {}, { - __index: (_, k) -> - if @userConfig and @userConfig[k] ~= nil - return @userConfig[k] - else return @defaults[k] - __newindex: (_, k, v) -> - @userConfig or= {} - @userConfig[k] = v - __len: (tbl) -> return 0 - __ipairs: (tbl) -> error "numerically indexed config hive keys are not supported" - __pairs: (tbl) -> - merged = util.copy @defaults - merged[k] = v for k, v in pairs @userConfig - return next, merged - } - @c = @config -- shortcut - - -- rig defaults in a way that writing to contained tables deep-copies the whole default - -- into the user configuration and sets the requested property there - recurse = (tbl) -> - for k,v in pairs tbl - continue if type(v)~="table" or type(k)=="string" and k\match "^__" - -- replace every table reference with an empty proxy table - -- this ensures all writes to the table get intercepted - tbl[k] = setmetatable {__key: k, __parent: tbl, __tbl: v}, { - -- make the original table the index of the proxy so that defaults can be read - __index: v - __len: (tbl) -> return #tbl.__tbl - __newindex: (tbl, k, v) -> - upKeys, parent = {}, tbl.__parent - -- trace back to defaults entry, pick up the keys along the path - while parent.__parent - tbl = parent - upKeys[#upKeys+1] = tbl.__key - parent = tbl.__parent - - -- deep copy the whole defaults node into the user configuration - -- (util.deep_copy does not copy attached metatable references) - -- make sure we copy the actual table, not the proxy - @userConfig or= {} - @userConfig[tbl.__key] = util.deep_copy @defaults[tbl.__key].__tbl - -- finally perform requested write on userdata - tbl = @userConfig[tbl.__key] - for i = #upKeys-1, 1, -1 - tbl = tbl[upKeys[i]] - tbl[k] = v - __pairs: (tbl) -> return next, tbl.__tbl - __ipairs: (tbl) -> - i, n, orgTbl = 0, #tbl.__tbl, tbl.__tbl - -> - i += 1 - return i, orgTbl[i] if i <= n - } - recurse tbl[k] - - recurse @defaults - @load! unless noLoad - - setFile: (path) => - return false unless path - if @@handlers[path] - table.insert @@handlers[path], @ - else @@handlers[path] = {@} - path, err = fileOps.validateFullPath path, true - return nil, err unless path - @file = path - return true - - unsetFile: => - handlers = @@handlers[@file] - if handlers and #handlers>1 - @@handlers[@file] = [handler for handler in *handlers when handler != @] - else @@handlers[@file] = nil - @file = nil - return true - - readFile: (file = @file, useLock = true, waitLockTime) => - if useLock - time, err = @getLock waitLockTime - unless time - -- handle\close! - return false, errors.failedLock\format "reading", err - - mode, file = fileOps.attributes file, "mode" - if mode == nil - @releaseLock! if useLock - return false, file - elseif not mode - @releaseLock! if useLock - @logger\trace traceMsgs.fileNotFound, @file - return nil - - handle, err = io.open file, "r" - unless handle - @releaseLock! if useLock - return false, err - - data = handle\read "*a" - success, result = pcall json.decode, data - unless success - handle\close! 
- -- JSON parse error usually points to a corrupted config file - -- Rename the broken file to allow generating a new one - -- so the user can continue his work - @logger\trace errors.jsonDecode, result - backup = @file .. ".corrupted" - fileOps.copy @file, backup - fileOps.remove @file, false, true - - @releaseLock! if useLock - return false, errors.configCorrupted\format backup - - handle\close! - @releaseLock! if useLock - - if "table" != type result - return false, errors.jsonRoot\format type result - - return result - - load: => - return false, errors.noFile unless @file - - config, err = @readFile! - return config, err unless config - - sectionExists = true - for i=1, #@section - config = config[@section[i]] - switch type config - when "table" continue - when "nil" - config, sectionExists = {}, false - break - else return false, errors.badKey\format "retrive", i, tostring(@section[i]),type config - - @userConfig or= {} - @userConfig[k] = v for k,v in pairs config - return sectionExists - - mergeSection: (config) => - --@logger\trace traceMsgs.mergeSectionStart, @logger\dumpToString(@section), - -- @logger\dumpToString config - - section, sectionExists = config, true - -- create missing parent sections - for i=1, #@section - childSection = section[@section[i]] - if childSection == nil - -- don't create parent sections if this section is going to be deleted - unless @userConfig - sectionExists = false - break - section[@section[i]] = {} - childSection = section[@section[i]] - elseif "table" != type childSection - return false, errors.badKey\format "update", i, tostring(@section[i]),type childSection - section = childSection if @userConfig or i < #@section - -- merge our values into our section - if @userConfig - section[k] = v for k,v in pairs @userConfig - elseif sectionExists - section[@section[#@section]] = nil - - -- @logger\trace traceMsgs.mergeSectionResult, @logger\dumpToString config - return config - - delete: (concertWrite, waitLockTime) => - @userConfig = nil - return @write concertWrite, waitLockTime - - write: (concertWrite, waitLockTime) => - return false, errors.noFile unless @file - - -- get a lock to avoid concurrent config file access - time, err = @getLock waitLockTime - unless time - return false, errors.failedLock\format "writing", err - - -- read the config file - config, err = @readFile @file, false - if config == false - @releaseLock! - return false, errors.writeFailedRead\format err - @logger\trace traceMsgs.fileCreate, @file unless config - config or= {} - - -- merge in our section - -- concerted writing allows us to update a configuration file - -- shared by multiple handlers in the lua environment - handlers = concertWrite and @@handlers[@file] or {@} - for handler in *handlers - config, err = handler\mergeSection config - unless config - @releaseLock! - return false, err - - -- create JSON - success, res = pcall json.encode, config - unless success - @releaseLock! - return false, res - - -- write the whole config file in one go - handle, err = io.open(@file, "w") - unless handle - @releaseLock! - return false, err - - @logger\trace traceMsgs.writing, @file - handle\setvbuf "full", 10e6 - handle\write res - handle\flush! - handle\close! - @releaseLock! - - return true - - getLock: (waitTimeout = 5000, checkInterval = 50) => - return 0 if @hasLock - success = mutex.tryLock! - if success - @hasLock = true - return 0 - - timeout, timePassed = waitTimeout, 0 - while not success and timeout > 0 - PreciseTimer.sleep checkInterval - success = mutex.tryLock! 
- timeout -= checkInterval - timePassed = waitTimeout - timeout - if timePassed % (checkInterval*5) == 0 - @logger\trace traceMsgs.waitingLock, timePassed - - if success - @logger\trace traceMsgs.waitingLockFinished, timePassed - @hasLock = true - return timePassed - else - -- @logger\trace traceMsgs.waitingLockTimeout, waitTimeout/1000 - -- success, err = @releaseLock true - -- unless success - -- return false, errors.forceReleaseFailed\format err - -- @hasLock = true - --return waitTimeout - return false, errors.lockTimeout - - getSectionHandler: (section, defaults, noLoad) => - return @@ @file, defaults, section, noLoad, @logger - - releaseLock: (force) => - if @hasLock or force - @hasLock = false - mutex.unlock! - return true - return false, errors.noLock - - -- copied from Aegisub util.moon, adjusted to skip private keys - deepCopy: (tbl) => - seen = {} - copy = (val) -> - return val if type(val) != 'table' - return seen[val] if seen[val] - seen[val] = val - {k, copy(v) for k, v in pairs val when type(k) != "string" or k\sub(1,1) != "_"} - copy tbl - - import: (tbl = {}, keys, updateOnly, skipSameLengthTables) => - tbl = tbl.userConfig if tbl.__class == @@ - changesMade = false - @userConfig or= {} - keys = {key, true for key in *keys} if keys - - for k,v in pairs tbl - continue if keys and not keys[k] or @userConfig[k] == v - continue if updateOnly and @c[k] == nil - -- TODO: deep-compare tables - isTable = type(v) == "table" - if isTable and skipSameLengthTables and type(@userConfig[k]) == "table" and #v == #@userConfig[k] - continue - continue if type(k) == "string" and k\sub(1,1) == "_" - @userConfig[k] = isTable and @deepCopy(v) or v - changesMade = true - +lfs = require "lfs" +util = require "aegisub.util" +json = require "json" +PreciseTimer = require "PT.PreciseTimer" +Logger = require "l0.DependencyControl.Logger" +fileOps = require "l0.DependencyControl.FileOps" +mutex = require "BM.BadMutex" + +class ConfigHandler + @handlers = {} + errors = { + jsonDecode: "JSON parse error: %s" + configCorrupted: [[An error occured while parsing the JSON config file. +A backup of the corrupted configuration has been written to '%s'. +Reload your automation scripts to generate a new configuration file.]] + badKey: "Can't %s section because the key #%d (%s) leads to a %s." + jsonRoot: "JSON root element must be an array or a hashtable, got a %s." + noFile: "No config file defined." + failedLock: "Failed to lock config file for %s: %s" + waitLockFailed: "Error waiting for existing lock to be released: %s" + forceReleaseFailed: "Failed to force-release existing lock after timeout had passed (%s)" + noLock: "#{@@__name} doesn't have a lock" + writeFailedRead: "Failed reading config file: %s." + lockTimeout: "Timeout reached while waiting for write lock." + } + traceMsgs = { + -- waitingLockPre: "Waiting %d ms before trying to get a lock..." + waitingLock: "Waiting for config file lock to be released (%d ms passed)... " + waitingLockFinished: "Lock was released after %d ms." + mergeSectionStart: "Merging own section into configuration. Own Section: %s\nConfiguration: %s" + mergeSectionResult: "Merge completed with result: %s" + fileNotFound: "Couldn't find config file '%s'." + fileCreate: "Config file '%s' doesn't exist, yet. Will write a fresh copy containing the current configuration section." + writing: "Writing config file '%s'..." + -- waitingLockTimeout: "Timeout was reached after %d seconds, force-releasing lock..." 
+ } + + new: (@file, defaults, @section, noLoad, @logger = Logger fileBaseName: @@__name) => + @section = {@section} if "table" != type @section + @defaults = defaults and util.deep_copy(defaults) or {} + -- register all handlers for concerted writing + @setFile @file + + -- set up user configuration and make defaults accessible + @userConfig = {} + @config = setmetatable {}, { + __index: (_, k) -> + if @userConfig and @userConfig[k] ~= nil + return @userConfig[k] + else return @defaults[k] + __newindex: (_, k, v) -> + @userConfig or= {} + @userConfig[k] = v + __len: (tbl) -> return 0 + __ipairs: (tbl) -> error "numerically indexed config hive keys are not supported" + __pairs: (tbl) -> + merged = util.copy @defaults + merged[k] = v for k, v in pairs @userConfig + return next, merged + } + @c = @config -- shortcut + + -- rig defaults in a way that writing to contained tables deep-copies the whole default + -- into the user configuration and sets the requested property there + recurse = (tbl) -> + for k,v in pairs tbl + continue if type(v)~="table" or type(k)=="string" and k\match "^__" + -- replace every table reference with an empty proxy table + -- this ensures all writes to the table get intercepted + tbl[k] = setmetatable {__key: k, __parent: tbl, __tbl: v}, { + -- make the original table the index of the proxy so that defaults can be read + __index: v + __len: (tbl) -> return #tbl.__tbl + __newindex: (tbl, k, v) -> + upKeys, parent = {}, tbl.__parent + -- trace back to defaults entry, pick up the keys along the path + while parent.__parent + tbl = parent + upKeys[#upKeys+1] = tbl.__key + parent = tbl.__parent + + -- deep copy the whole defaults node into the user configuration + -- (util.deep_copy does not copy attached metatable references) + -- make sure we copy the actual table, not the proxy + @userConfig or= {} + @userConfig[tbl.__key] = util.deep_copy @defaults[tbl.__key].__tbl + -- finally perform requested write on userdata + tbl = @userConfig[tbl.__key] + for i = #upKeys-1, 1, -1 + tbl = tbl[upKeys[i]] + tbl[k] = v + __pairs: (tbl) -> return next, tbl.__tbl + __ipairs: (tbl) -> + i, n, orgTbl = 0, #tbl.__tbl, tbl.__tbl + -> + i += 1 + return i, orgTbl[i] if i <= n + } + recurse tbl[k] + + recurse @defaults + @load! unless noLoad + + setFile: (path) => + return false unless path + if @@handlers[path] + table.insert @@handlers[path], @ + else @@handlers[path] = {@} + path, err = fileOps.validateFullPath path, true + return nil, err unless path + @file = path + return true + + unsetFile: => + handlers = @@handlers[@file] + if handlers and #handlers>1 + @@handlers[@file] = [handler for handler in *handlers when handler != @] + else @@handlers[@file] = nil + @file = nil + return true + + readFile: (file = @file, useLock = true, waitLockTime) => + if useLock + time, err = @getLock waitLockTime + unless time + -- handle\close! + return false, errors.failedLock\format "reading", err + + mode, file = fileOps.attributes file, "mode" + if mode == nil + @releaseLock! if useLock + return false, file + elseif not mode + @releaseLock! if useLock + @logger\trace traceMsgs.fileNotFound, @file + return nil + + handle, err = io.open file, "r" + unless handle + @releaseLock! if useLock + return false, err + + data = handle\read "*a" + success, result = pcall json.decode, data + unless success + handle\close! 
+ -- JSON parse error usually points to a corrupted config file + -- Rename the broken file to allow generating a new one + -- so the user can continue his work + @logger\trace errors.jsonDecode, result + backup = @file .. ".corrupted" + fileOps.copy @file, backup + fileOps.remove @file, false, true + + @releaseLock! if useLock + return false, errors.configCorrupted\format backup + + handle\close! + @releaseLock! if useLock + + if "table" != type result + return false, errors.jsonRoot\format type result + + return result + + load: => + return false, errors.noFile unless @file + + config, err = @readFile! + return config, err unless config + + sectionExists = true + for i=1, #@section + config = config[@section[i]] + switch type config + when "table" continue + when "nil" + config, sectionExists = {}, false + break + else return false, errors.badKey\format "retrive", i, tostring(@section[i]),type config + + @userConfig or= {} + @userConfig[k] = v for k,v in pairs config + return sectionExists + + mergeSection: (config) => + --@logger\trace traceMsgs.mergeSectionStart, @logger\dumpToString(@section), + -- @logger\dumpToString config + + section, sectionExists = config, true + -- create missing parent sections + for i=1, #@section + childSection = section[@section[i]] + if childSection == nil + -- don't create parent sections if this section is going to be deleted + unless @userConfig + sectionExists = false + break + section[@section[i]] = {} + childSection = section[@section[i]] + elseif "table" != type childSection + return false, errors.badKey\format "update", i, tostring(@section[i]),type childSection + section = childSection if @userConfig or i < #@section + -- merge our values into our section + if @userConfig + section[k] = v for k,v in pairs @userConfig + elseif sectionExists + section[@section[#@section]] = nil + + -- @logger\trace traceMsgs.mergeSectionResult, @logger\dumpToString config + return config + + delete: (concertWrite, waitLockTime) => + @userConfig = nil + return @write concertWrite, waitLockTime + + write: (concertWrite, waitLockTime) => + return false, errors.noFile unless @file + + -- get a lock to avoid concurrent config file access + time, err = @getLock waitLockTime + unless time + return false, errors.failedLock\format "writing", err + + -- read the config file + config, err = @readFile @file, false + if config == false + @releaseLock! + return false, errors.writeFailedRead\format err + @logger\trace traceMsgs.fileCreate, @file unless config + config or= {} + + -- merge in our section + -- concerted writing allows us to update a configuration file + -- shared by multiple handlers in the lua environment + handlers = concertWrite and @@handlers[@file] or {@} + for handler in *handlers + config, err = handler\mergeSection config + unless config + @releaseLock! + return false, err + + -- create JSON + success, res = pcall json.encode, config + unless success + @releaseLock! + return false, res + + -- write the whole config file in one go + handle, err = io.open(@file, "w") + unless handle + @releaseLock! + return false, err + + @logger\trace traceMsgs.writing, @file + handle\setvbuf "full", 10e6 + handle\write res + handle\flush! + handle\close! + @releaseLock! + + return true + + getLock: (waitTimeout = 5000, checkInterval = 50) => + return 0 if @hasLock + success = mutex.tryLock! + if success + @hasLock = true + return 0 + + timeout, timePassed = waitTimeout, 0 + while not success and timeout > 0 + PreciseTimer.sleep checkInterval + success = mutex.tryLock! 
+ timeout -= checkInterval + timePassed = waitTimeout - timeout + if timePassed % (checkInterval*5) == 0 + @logger\trace traceMsgs.waitingLock, timePassed + + if success + @logger\trace traceMsgs.waitingLockFinished, timePassed + @hasLock = true + return timePassed + else + -- @logger\trace traceMsgs.waitingLockTimeout, waitTimeout/1000 + -- success, err = @releaseLock true + -- unless success + -- return false, errors.forceReleaseFailed\format err + -- @hasLock = true + --return waitTimeout + return false, errors.lockTimeout + + getSectionHandler: (section, defaults, noLoad) => + return @@ @file, defaults, section, noLoad, @logger + + releaseLock: (force) => + if @hasLock or force + @hasLock = false + mutex.unlock! + return true + return false, errors.noLock + + -- copied from Aegisub util.moon, adjusted to skip private keys + deepCopy: (tbl) => + seen = {} + copy = (val) -> + return val if type(val) != 'table' + return seen[val] if seen[val] + seen[val] = val + {k, copy(v) for k, v in pairs val when type(k) != "string" or k\sub(1,1) != "_"} + copy tbl + + import: (tbl = {}, keys, updateOnly, skipSameLengthTables) => + tbl = tbl.userConfig if tbl.__class == @@ + changesMade = false + @userConfig or= {} + keys = {key, true for key in *keys} if keys + + for k,v in pairs tbl + continue if keys and not keys[k] or @userConfig[k] == v + continue if updateOnly and @c[k] == nil + -- TODO: deep-compare tables + isTable = type(v) == "table" + if isTable and skipSameLengthTables and type(@userConfig[k]) == "table" and #v == #@userConfig[k] + continue + continue if type(k) == "string" and k\sub(1,1) == "_" + @userConfig[k] = isTable and @deepCopy(v) or v + changesMade = true + return changesMade \ No newline at end of file diff --git a/modules/DependencyControl/FileOps.moon b/modules/DependencyControl/FileOps.moon index 65c9f2e..707742b 100644 --- a/modules/DependencyControl/FileOps.moon +++ b/modules/DependencyControl/FileOps.moon @@ -1,303 +1,303 @@ -ffi = require "ffi" -re = require "aegisub.re" -Logger = require "l0.DependencyControl.Logger" -local ConfigHandler - -class FileOps - msgs = { - generic: { - deletionRescheduled: "Another deletion attempt has been rescheduled for the next restart." - } - attributes: { - badPath: "Path failed verification: %s." - genericError: "Can't retrieve attributes: %s." - noAttribute: "Can't find attriubte with name '%s'." - } - - mkdir: { - createError: "Error creating directory: %s." - otherExists: "Couldn't create directory because a %s of the same name is already present." - } - copy: { - targetExists: "Target file '%s' already exists" - genericError: "An error occured while copying file '%s' to '%s':\n%s" - dirCopyUnsupported: "Copying directories is currently not supported." - missingSource: "Couldn't find source file '%s'." - openError: "Couldn't open %s file '%s' for reading: \n%s" - } - move: { - inUseTryingRename: "Target file '%s' already exists and appears to be in use. Trying to rename and delete existing file..." - renamedDeletionFailed: "The existing file was successfully renamed to '%s', but couldn't be deleted (%s).\n%s" - overwritingFile: "File '%s' already exists, overwriting..." - createdDir: "Created target directory '%s'." - exists: "Couldn't move file '%s' to '%s' because a %s of the same name is already present." - genericError: "An error occured while moving file '%s' to '%s':\n%s" - createDirError: "Moving '%s' to '%s' failed (%s)." - cantRemove: "Couldn't overwrite file '%s': %s. Attempts at renaming the existing target file failed." 
- cantRenameTryingCopy: "Move operation failed to rename '%s' to '%s' (%s), trying copy+remove instead..." - couldntRemoveFiles: "Move operation suceeded to copied the file(s) to the target location, but some of the source files couldn't be removed:\n%s\n%s" - cantCopy: "Move operation failed to copy '%s' to '%s' (%s) after a failed rename attempt (%s)." - } - rmdir: { - emptyPath: "Argument #1 (path) must not be an empty string." - couldntRemoveFiles: "Some of the files and folders in the specified directory couldn't be removed:\n%s" - couldntRemoveDir: "Error removing empty directory: %s." - - } - validateFullPath: { - badType: "Argument #1 (path) had the wrong type. Expected 'string', got '%s'." - tooLong: "The specified path exceeded the maximum length limit (%d > %d)." - invalidChars: "The specifed path contains one or more invalid characters: '%s'." - reservedNames: "The specified path contains reserved path or file names: '%s'." - parentPath: "Accessing parent directories is not allowed." - notFullPath: "The specified path is not a valid full path." - missingExt: "The specified path is missing a file extension." - } - } - - devPattern = ffi.os == "Windows" and "[A-Za-z]:" or "/[^\\\\/]+" - pathMatch = { - sep: ffi.os == "Windows" and "\\" or "/" - pattern: re.compile "^(#{devPattern})((?:[\\\\/][^\\\\/]*[^\\\\/\\s\\.])*)[\\\\/]([^\\\\/]*[^\\\\/\\s\\.])?$" - invalidChars: '[<>:"|%?%*%z%c;]' - reservedNames: re.compile "[\\\\/](CON|COM[1-9]|PRN|AUX|NUL|LPT[1-9])(?:[\\\\/].*?)?$", re.ICASE - maxLen: 255 - } - @logger = Logger! - - createConfig = (noLoad, configDir) -> - FileOps.configDir = configDir if configDir - ConfigHandler or= require "l0.DependencyControl.ConfigHandler" - FileOps.config or= ConfigHandler "#{FileOps.configDir}/l0.#{FileOps.__name}.json", - {toRemove: {}}, nil, noLoad, FileOps.logger - return FileOps.config - - remove: (paths, recurse, reSchedule) -> - config = createConfig true - configLoaded, overallSuccess, details, firstErr = false, true, {} - paths = {paths} unless type(paths) == "table" - - for path in *paths - mode, path = FileOps.attributes path, "mode" - if mode - rmFunc = mode == "file" and os.remove or FileOps.rmdir - res, err = rmFunc path, recurse - unless res - firstErr or= err - unless reSchedule -- delete operation failed entirely - details[path] = {nil, err} - overallSuccess = nil - continue - - -- load the FileOps configuration file and reschedule deletions - unless configLoaded - FileOps.config\load! - configLoaded = true - config.c.toRemove[path] = os.time! - -- mark the operations as failed "for now", indicating a second attempt has been scheduled - details[path] = {false, err} - overallSuccess = false - - -- delete operation succeeded - else details[path] = {true} - -- file not found or permission issue - else details[path] = {nil, err} - - config\write! if configLoaded - return overallSuccess, details, firstErr - - runScheduledRemoval: (configDir) -> - config = createConfig false, configDir - paths = [path for path, _ in pairs config.c.toRemove] - if #paths > 0 - -- rescheduled removals will not be rescheduled another time - FileOps.remove paths, true - config.c.toRemove = {} - config\write! 
- return true - - copy: ( source, target ) -> - -- source check - mode, sourceFullPath, _, _, fileName = FileOps.attributes source, "mode" - switch mode - when "directory" - return false, msgs.copy.dirCopyUnsupported - when nil - return false, msgs.copy.genericError\format source, target, sourceFullPath - when false - return false, msgs.copy.missingSource\format source - - -- target check - checkTarget = (target) -> - mode, targetFullPath = FileOps.attributes target, "mode" - switch mode - when "file" - return false, msgs.copy.targetExists\format target - when nil - return false, msgs.copy.genericError\format source, target, targetFullPath - when "directory" - target ..= "/#{fileName}" - return checkTarget target - return true, targetFullPath - - success, targetFullPath = checkTarget target - return false, targetFullPath unless success - - input, msg = io.open sourceFullPath, "rb" - unless input - return false, msgs.copy.openError\format "source", sourceFullPath, msg - - output, msg = io.open targetFullPath, "wb" - unless output - input\close! - return false, msgs.copy.openError\format "target", targetFullPath, msg - - success, msg = output\write input\read "*a" - input\close! - output\close! - - if success - return true - else - return false, msgs.copy.genericError\format sourceFullPath, targetFullPath, msg - - - move: (source, target, overwrite) -> - mode, err = FileOps.attributes target, "mode" - if mode == "file" - unless overwrite - return false, msg.move.exists\format source, target, mode - FileOps.logger\trace msgs.move.overwritingFile, target - res, _, err = FileOps.remove target - unless res - -- can't remove old target file, probably in use or lack of permissions - -- try to rename and then delete it - FileOps.logger\debug msgs.move.inUseTryingRename, target - junkName = "#{target}.depCtrlRemoved" - -- There might be an old removed file we couldn't delete before - FileOps.remove junkName - res = os.rename target, junkName - unless res - return false, msgs.move.cantRemove\format target, err - -- rename succeeded, now clean up after ourselves - res, _, err = FileOps.remove junkName, false, true - unless res - FileOps.logger\debug msgs.move.renamedDeletionFailed, junkName, err, msgs.generic.deletionRescheduled - - elseif mode -- a directory (or something else) of the same name as the target file is already present - return false, msgs.move.exists\format source, target, mode - elseif mode == nil -- if retrieving the attributes of a file fails, something is probably wrong - return false, msgs.move.genericError\format source, target, err - - else -- target file not found, check directory - res, dir = FileOps.mkdir target, true - if res == nil - return false, msgs.move.createDirError\format source, target, err - elseif res - FileOps.logger\trace msgs.move.createdDir, dir - - -- at this point the target directory exists and the target file doesn't, move the file - res, err = os.rename source, target - unless res - -- renaming the file failed, could be because of a permission issue - -- but me might a well be trying to rename over file system boundaries on *nix - -- so we should try copy + remove before giving up - FileOps.logger\debug msgs.move.cantRenameTryingCopy, source, target, err - renErr, res, err = err, FileOps.copy source, target - unless res - return false, msgs.move.cantCopy\format source, target, err, renErr - res, details = FileOps.remove source, false, true -- TODO: also support directories/recursion, but also require copy to support it - - unless res - fileList = 
table.concat ["#{path}: #{res[2]}" for path, res in pairs details when not res[1]], "\n" - FileOps.logger\debug msgs.move.couldntRemoveFiles, fileList, msgs.generic.deletionRescheduled - - return true - - rmdir: (path, recurse = true) -> - return nil, msgs.rmdir.emptyPath if path == "" - mode, path = FileOps.attributes path, "mode" - return nil, msgs.rmdir.notPath unless mode == "directory" - - if recurse - -- recursively remove contained files and directories - toRemove = ["#{path}/#{file}" for file in lfs.dir path] - res, details = FileOps.remove toRemove, true - unless res - fileList = table.concat ["#{path}: #{res[2]}" for path, res in pairs details when not res[1]], "\n" - return nil, msgs.rmdir.couldntRemoveFiles\format fileList - - -- remove empty directory - success, err = lfs.rmdir path - unless success - return nil, msgs.rmdir.couldntRemoveDir\format err - - return true - - mkdir: (path, isFile) -> - mode, fullPath, dev, dir, file = FileOps.attributes path, "mode" - dir = isFile and table.concat({dev,dir or file}) or fullPath - - if mode == nil - return nil, msgs.attributes.genericError\format fullPath - elseif not mode - res, err = lfs.mkdir dir - if err -- can't create directory (possibly a permission error) - return nil, msgs.mkdir.createError\format err - return true, dir - elseif mode != "directory" -- a file of the same name as the target directory is already present - return nil, msgs.mkdir.otherExists\format mode - return false, dir - - attributes: (path, key) -> - fullPath, dev, dir, file = FileOps.validateFullPath path - unless fullPath - path = "#{lfs.currentdir!}/#{path}" - fullPath, dev, dir, file = FileOps.validateFullPath path - unless fullPath - return nil, msgs.attributes.badPath\format dev - - attr, err = lfs.attributes fullPath, key - if err - return nil, msgs.attributes.genericError\format err - elseif not attr - return false, fullPath, dev, dir, file - - return attr, fullPath, dev, dir, file - - validateFullPath: (path, checkFileExt) -> - if type(path) != "string" - return nil, msgs.validateFullPath.badType\format type(path) - -- expand aegisub path specifiers - path = aegisub.decode_path path - -- expand home directory on linux - homeDir = os.getenv "HOME" - path = path\gsub "^~", "{#homeDir}/" if homeDir - -- use single native path separators - path = path\gsub "[\\/]+", pathMatch.sep - -- check length - if #path > pathMatch.maxLen - return false, msgs.validateFullPath.tooLong\format #path, maxLen - -- check for invalid characters - invChar = path\match pathMatch.invalidChars, ffi.os == "Windows" and 3 or nil - if invChar - return false, msgs.validateFullPath.invalidChars\format invChar - -- check for reserved file names - reserved = pathMatch.reservedNames\match path - if reserved - return false, msgs.validateFullPath.reservedNames\format reserved[2].str - -- check for path escalation - if path\match "%.%." 
- return false, msgs.validateFullPath.parentPath - - -- check if we got a valid full path - matches = pathMatch.pattern\match path - dev, dir, file = matches[2].str, matches[3].str, matches[4].str if matches - unless dev - return false, msgs.validateFullPath.notFullPath - if checkFileExt and not (file and file\match ".+%.+") - return false, msgs.validateFullPath.missingExt - - path = table.concat({dev, dir, file and pathMatch.sep, file}) - +ffi = require "ffi" +re = require "aegisub.re" +Logger = require "l0.DependencyControl.Logger" +local ConfigHandler + +class FileOps + msgs = { + generic: { + deletionRescheduled: "Another deletion attempt has been rescheduled for the next restart." + } + attributes: { + badPath: "Path failed verification: %s." + genericError: "Can't retrieve attributes: %s." + noAttribute: "Can't find attriubte with name '%s'." + } + + mkdir: { + createError: "Error creating directory: %s." + otherExists: "Couldn't create directory because a %s of the same name is already present." + } + copy: { + targetExists: "Target file '%s' already exists" + genericError: "An error occured while copying file '%s' to '%s':\n%s" + dirCopyUnsupported: "Copying directories is currently not supported." + missingSource: "Couldn't find source file '%s'." + openError: "Couldn't open %s file '%s' for reading: \n%s" + } + move: { + inUseTryingRename: "Target file '%s' already exists and appears to be in use. Trying to rename and delete existing file..." + renamedDeletionFailed: "The existing file was successfully renamed to '%s', but couldn't be deleted (%s).\n%s" + overwritingFile: "File '%s' already exists, overwriting..." + createdDir: "Created target directory '%s'." + exists: "Couldn't move file '%s' to '%s' because a %s of the same name is already present." + genericError: "An error occured while moving file '%s' to '%s':\n%s" + createDirError: "Moving '%s' to '%s' failed (%s)." + cantRemove: "Couldn't overwrite file '%s': %s. Attempts at renaming the existing target file failed." + cantRenameTryingCopy: "Move operation failed to rename '%s' to '%s' (%s), trying copy+remove instead..." + couldntRemoveFiles: "Move operation suceeded to copied the file(s) to the target location, but some of the source files couldn't be removed:\n%s\n%s" + cantCopy: "Move operation failed to copy '%s' to '%s' (%s) after a failed rename attempt (%s)." + } + rmdir: { + emptyPath: "Argument #1 (path) must not be an empty string." + couldntRemoveFiles: "Some of the files and folders in the specified directory couldn't be removed:\n%s" + couldntRemoveDir: "Error removing empty directory: %s." + + } + validateFullPath: { + badType: "Argument #1 (path) had the wrong type. Expected 'string', got '%s'." + tooLong: "The specified path exceeded the maximum length limit (%d > %d)." + invalidChars: "The specifed path contains one or more invalid characters: '%s'." + reservedNames: "The specified path contains reserved path or file names: '%s'." + parentPath: "Accessing parent directories is not allowed." + notFullPath: "The specified path is not a valid full path." + missingExt: "The specified path is missing a file extension." 
+ } + } + + devPattern = ffi.os == "Windows" and "[A-Za-z]:" or "/[^\\\\/]+" + pathMatch = { + sep: ffi.os == "Windows" and "\\" or "/" + pattern: re.compile "^(#{devPattern})((?:[\\\\/][^\\\\/]*[^\\\\/\\s\\.])*)[\\\\/]([^\\\\/]*[^\\\\/\\s\\.])?$" + invalidChars: '[<>:"|%?%*%z%c;]' + reservedNames: re.compile "[\\\\/](CON|COM[1-9]|PRN|AUX|NUL|LPT[1-9])(?:[\\\\/].*?)?$", re.ICASE + maxLen: 255 + } + @logger = Logger! + + createConfig = (noLoad, configDir) -> + FileOps.configDir = configDir if configDir + ConfigHandler or= require "l0.DependencyControl.ConfigHandler" + FileOps.config or= ConfigHandler "#{FileOps.configDir}/l0.#{FileOps.__name}.json", + {toRemove: {}}, nil, noLoad, FileOps.logger + return FileOps.config + + remove: (paths, recurse, reSchedule) -> + config = createConfig true + configLoaded, overallSuccess, details, firstErr = false, true, {} + paths = {paths} unless type(paths) == "table" + + for path in *paths + mode, path = FileOps.attributes path, "mode" + if mode + rmFunc = mode == "file" and os.remove or FileOps.rmdir + res, err = rmFunc path, recurse + unless res + firstErr or= err + unless reSchedule -- delete operation failed entirely + details[path] = {nil, err} + overallSuccess = nil + continue + + -- load the FileOps configuration file and reschedule deletions + unless configLoaded + FileOps.config\load! + configLoaded = true + config.c.toRemove[path] = os.time! + -- mark the operations as failed "for now", indicating a second attempt has been scheduled + details[path] = {false, err} + overallSuccess = false + + -- delete operation succeeded + else details[path] = {true} + -- file not found or permission issue + else details[path] = {nil, err} + + config\write! if configLoaded + return overallSuccess, details, firstErr + + runScheduledRemoval: (configDir) -> + config = createConfig false, configDir + paths = [path for path, _ in pairs config.c.toRemove] + if #paths > 0 + -- rescheduled removals will not be rescheduled another time + FileOps.remove paths, true + config.c.toRemove = {} + config\write! + return true + + copy: ( source, target ) -> + -- source check + mode, sourceFullPath, _, _, fileName = FileOps.attributes source, "mode" + switch mode + when "directory" + return false, msgs.copy.dirCopyUnsupported + when nil + return false, msgs.copy.genericError\format source, target, sourceFullPath + when false + return false, msgs.copy.missingSource\format source + + -- target check + checkTarget = (target) -> + mode, targetFullPath = FileOps.attributes target, "mode" + switch mode + when "file" + return false, msgs.copy.targetExists\format target + when nil + return false, msgs.copy.genericError\format source, target, targetFullPath + when "directory" + target ..= "/#{fileName}" + return checkTarget target + return true, targetFullPath + + success, targetFullPath = checkTarget target + return false, targetFullPath unless success + + input, msg = io.open sourceFullPath, "rb" + unless input + return false, msgs.copy.openError\format "source", sourceFullPath, msg + + output, msg = io.open targetFullPath, "wb" + unless output + input\close! + return false, msgs.copy.openError\format "target", targetFullPath, msg + + success, msg = output\write input\read "*a" + input\close! + output\close! 
+ + if success + return true + else + return false, msgs.copy.genericError\format sourceFullPath, targetFullPath, msg + + + move: (source, target, overwrite) -> + mode, err = FileOps.attributes target, "mode" + if mode == "file" + unless overwrite + return false, msg.move.exists\format source, target, mode + FileOps.logger\trace msgs.move.overwritingFile, target + res, _, err = FileOps.remove target + unless res + -- can't remove old target file, probably in use or lack of permissions + -- try to rename and then delete it + FileOps.logger\debug msgs.move.inUseTryingRename, target + junkName = "#{target}.depCtrlRemoved" + -- There might be an old removed file we couldn't delete before + FileOps.remove junkName + res = os.rename target, junkName + unless res + return false, msgs.move.cantRemove\format target, err + -- rename succeeded, now clean up after ourselves + res, _, err = FileOps.remove junkName, false, true + unless res + FileOps.logger\debug msgs.move.renamedDeletionFailed, junkName, err, msgs.generic.deletionRescheduled + + elseif mode -- a directory (or something else) of the same name as the target file is already present + return false, msgs.move.exists\format source, target, mode + elseif mode == nil -- if retrieving the attributes of a file fails, something is probably wrong + return false, msgs.move.genericError\format source, target, err + + else -- target file not found, check directory + res, dir = FileOps.mkdir target, true + if res == nil + return false, msgs.move.createDirError\format source, target, err + elseif res + FileOps.logger\trace msgs.move.createdDir, dir + + -- at this point the target directory exists and the target file doesn't, move the file + res, err = os.rename source, target + unless res + -- renaming the file failed, could be because of a permission issue + -- but me might a well be trying to rename over file system boundaries on *nix + -- so we should try copy + remove before giving up + FileOps.logger\debug msgs.move.cantRenameTryingCopy, source, target, err + renErr, res, err = err, FileOps.copy source, target + unless res + return false, msgs.move.cantCopy\format source, target, err, renErr + res, details = FileOps.remove source, false, true -- TODO: also support directories/recursion, but also require copy to support it + + unless res + fileList = table.concat ["#{path}: #{res[2]}" for path, res in pairs details when not res[1]], "\n" + FileOps.logger\debug msgs.move.couldntRemoveFiles, fileList, msgs.generic.deletionRescheduled + + return true + + rmdir: (path, recurse = true) -> + return nil, msgs.rmdir.emptyPath if path == "" + mode, path = FileOps.attributes path, "mode" + return nil, msgs.rmdir.notPath unless mode == "directory" + + if recurse + -- recursively remove contained files and directories + toRemove = ["#{path}/#{file}" for file in lfs.dir path] + res, details = FileOps.remove toRemove, true + unless res + fileList = table.concat ["#{path}: #{res[2]}" for path, res in pairs details when not res[1]], "\n" + return nil, msgs.rmdir.couldntRemoveFiles\format fileList + + -- remove empty directory + success, err = lfs.rmdir path + unless success + return nil, msgs.rmdir.couldntRemoveDir\format err + + return true + + mkdir: (path, isFile) -> + mode, fullPath, dev, dir, file = FileOps.attributes path, "mode" + dir = isFile and table.concat({dev,dir or file}) or fullPath + + if mode == nil + return nil, msgs.attributes.genericError\format fullPath + elseif not mode + res, err = lfs.mkdir dir + if err -- can't create directory (possibly a 
permission error) + return nil, msgs.mkdir.createError\format err + return true, dir + elseif mode != "directory" -- a file of the same name as the target directory is already present + return nil, msgs.mkdir.otherExists\format mode + return false, dir + + attributes: (path, key) -> + fullPath, dev, dir, file = FileOps.validateFullPath path + unless fullPath + path = "#{lfs.currentdir!}/#{path}" + fullPath, dev, dir, file = FileOps.validateFullPath path + unless fullPath + return nil, msgs.attributes.badPath\format dev + + attr, err = lfs.attributes fullPath, key + if err + return nil, msgs.attributes.genericError\format err + elseif not attr + return false, fullPath, dev, dir, file + + return attr, fullPath, dev, dir, file + + validateFullPath: (path, checkFileExt) -> + if type(path) != "string" + return nil, msgs.validateFullPath.badType\format type(path) + -- expand aegisub path specifiers + path = aegisub.decode_path path + -- expand home directory on linux + homeDir = os.getenv "HOME" + path = path\gsub "^~", "{#homeDir}/" if homeDir + -- use single native path separators + path = path\gsub "[\\/]+", pathMatch.sep + -- check length + if #path > pathMatch.maxLen + return false, msgs.validateFullPath.tooLong\format #path, maxLen + -- check for invalid characters + invChar = path\match pathMatch.invalidChars, ffi.os == "Windows" and 3 or nil + if invChar + return false, msgs.validateFullPath.invalidChars\format invChar + -- check for reserved file names + reserved = pathMatch.reservedNames\match path + if reserved + return false, msgs.validateFullPath.reservedNames\format reserved[2].str + -- check for path escalation + if path\match "%.%." + return false, msgs.validateFullPath.parentPath + + -- check if we got a valid full path + matches = pathMatch.pattern\match path + dev, dir, file = matches[2].str, matches[3].str, matches[4].str if matches + unless dev + return false, msgs.validateFullPath.notFullPath + if checkFileExt and not (file and file\match ".+%.+") + return false, msgs.validateFullPath.missingExt + + path = table.concat({dev, dir, file and pathMatch.sep, file}) + return path, dev, dir, file \ No newline at end of file diff --git a/modules/DependencyControl/UnitTestSuite.moon b/modules/DependencyControl/UnitTestSuite.moon index f25675d..6190fef 100644 --- a/modules/DependencyControl/UnitTestSuite.moon +++ b/modules/DependencyControl/UnitTestSuite.moon @@ -1,846 +1,846 @@ - -Logger = require "l0.DependencyControl.Logger" -re = require "aegisub.re" --- make sure tests can be loaded from the test directory -package.path ..= aegisub.decode_path("?user/automation/tests") .. "/?.lua;" - ---- A class for all single unit tests. --- Provides useful assertion and logging methods for a user-specified test function. --- @classmod UnitTest -class UnitTest - @msgs = { - run: { - setup: "Performing setup... " - teardown: "Performing teardown... " - test: "Running test '%s'... " - ok: "OK." - failed: "FAILED!" - reason: "Reason: %s" - } - new: { - badTestName: "Test name must be of type %s, got a %s." - } - - assert: { - true: "Expected true, actual value was %s." - false: "Expected false, actual value was %s." - nil: "Expected nil, actual value was %s." - notNil: "Got nil when a value was expected." - truthy: "Expected a truthy value, actual value was falsy (%s)." - falsy: "Expected a falsy value, actual value was truthy (%s)." - type: "Expected a value of type %s, actual value was of type %s." - sameType: "Type of expected value (%s) didn't match type of actual value (%s)." 
- inRange: "Expected value to be in range [%d .. %d], actual value %d was %s %d." - almostEquals: "Expected value to be almost equal %d ± %d, actual value was %d." - notAlmostEquals: "Expected numerical value to not be close to %d ± %d, actual value was %d." - checkArgTypes: "Expected argument #%d (%s) to be of type %s, got a %s." - zero: "Expected 0, actual value was a %s." - notZero: "Got a 0 when a number other than 0 was expected." - compare: "Expected value to be a number %s %d, actual value was %d." - integer: "Expected numerical value to be an integer, actual value was %d." - positiveNegative: "Expected a %s number (0 %s), actual value was %d." - equals: "Actual value didn't match expected value.\n%s actual: %s\n%s expected: %s" - notEquals: "Actual value equals expected value when it wasn't supposed to:\n%s actual: %s" - is: "Expected %s, actual value was %s." - isNot: "Actual value %s was identical to the expected value when it wasn't supposed to." - itemsEqual: "Actual item values of table weren't %s to the expected values (checked %s):\n Actual: %s\nExpected: %s" - itemsEqualNumericKeys: "only continuous numerical keys" - itemsEqualAllKeys: "all keys" - continuous: "Expected table to have continuous numerical keys, but value at index %d of %d was a nil." - matches: "String value '%s' didn't match expected %s pattern '%s'." - contains: "String value '%s' didn't contain expected substring '%s' (case-%s comparison)." - error: "Expected function to throw an error but it succesfully returned %d values: %s" - errorMsgMatches: "Error message '%s' didn't match expected %s pattern '%s'." - } - - formatTemplate: { - type: "'%s' of type %s" - } - } - - --- Creates a single unit test. - -- Instead of calling this constructor you'd usually provide test data - -- in a table structure to @{UnitTestSuite:new} as an argument. - -- @tparam string name a descriptive title for the test - -- @tparam function(UnitTest, ...) testFunc the function containing the test code - -- @tparam UnitTestClass testClass the test class this test belongs to - -- @treturn UnitTest the unit test - -- @see UnitTestSuite:new - new: (@name, @f = -> , @testClass) => - @logger = @testClass.logger - error type(@logger) unless type(@logger) == "table" - @logger\assert type(@name) == "string", @@msgs.new.badTestName, type @name - - --- Runs the unit test function. - -- In addition to the @{UnitTest} object itself, it also passes - -- the specified arguments into the function. - -- @param[opt] args any optional modules or other data the test function needs - -- @treturn[1] boolean true (test succeeded) - -- @treturn[2] boolean false (test failed) - -- @treturn[2] string the error message describing how the test failed - run: (...) => - @assertFailed = false - @logStart! - @success, res = xpcall @f, debug.traceback, @, ... - @logResult res - - return @success, @errMsg - - --- Formats and writes a "running test x" message to the log. - -- @local - logStart: => - @logger\logEx nil, @@msgs.run.test, false, nil, nil, @name - - --- Formats and writes the test result to the log. - -- In case of failure the message contains details about either the test assertion that failed - -- or a stack trace if the test ran into a different exception. 
- -- @local - -- @tparam[opt=errMsg] the error message being logged; defaults to the error returned by the last run of this test - logResult: (errMsg = @errMsg) => - if @success - @logger\logEx nil, @@msgs.run.ok, nil, nil, 0 - else - if @assertFailed - -- scrub useless stack trace from asserts provided by this module - errMsg = errMsg\gsub "%[%w+ \".-\"%]:%d+:", "" - errMsg = errMsg\gsub "stack traceback:.*", "" - @errMsg = errMsg - @logger\logEx nil, @@msgs.run.failed, nil, nil, 0 - @logger.indent += 1 - @logger\log @@msgs.run.reason, @errMsg - @logger.indent -= 1 - - --- Formats a message with a specified predefined template. - -- Currently only supports the "type" template. - -- @local - -- @tparam string template the name of the template to use - -- @param[opt] args any arguments required for formatting the message - format: (tmpl, ...) => - inArgs = table.pack ... - outArgs = switch tmpl - when "type" then {tostring(inArgs[1]), type(inArgs[1])} - - @@msgs.formatTemplate[tmpl]\format unpack outArgs - - - -- static helper functions - - --- Compares equality of two specified arguments - -- Requirements for values are considered equal: - -- [1] their types match - -- [2] their metatables are equal - -- [3] strings and numbers are compared by value - -- functions and cdata are compared by reference - -- tables must have equal values at identical indexes and are compared recursively - -- (i.e. two table copies of `{"a", {"b"}}` are considered equal) - -- @static - -- @param a the first value - -- @param b the second value - -- @tparam[opt] string aType if already known, specify the type of the first value - -- for a small performance benefit - -- @tparam[opt] string bType the type of the second value - -- @treturn boolean `true` if a and b are equal, otherwise `false` - equals: (a, b, aType, bType) -> - -- TODO: support equality comparison of tables used as keys - treeA, treeB, depth = {}, {}, 0 - - recurse = (a, b, aType = type a, bType) -> - -- identical values are equal - return true if a == b - -- only tables can be equal without also being identical - bType or= type b - return false if aType != bType or aType != "table" - - -- perform table equality comparison - return false if #a != #b - - aFieldCnt, bFieldCnt = 0, 0 - local tablesSeenAtKeys - - depth += 1 - treeA[depth], treeB[depth] = a, b - - for k, v in pairs a - vType = type v - if vType == "table" - -- comparing tables is expensive so we should keep a list - -- of keys we can skip checking when iterating table b - tablesSeenAtKeys or= {} - tablesSeenAtKeys[k] = true - - -- detect synchronous circular references to prevent infinite recursion loops - for i = 1, depth - return true if v == treeA[i] and b[k] == treeB[i] - - unless recurse v, b[k], vType - depth -= 1 - return false - - aFieldCnt += 1 - - for k, v in pairs b - continue if tablesSeenAtKeys and tablesSeenAtKeys[k] - if bFieldCnt == aFieldCnt or not recurse v, a[k] - -- no need to check further if the field count is not identical - depth -= 1 - return false - bFieldCnt += 1 - - -- check metatables for equality - res = recurse getmetatable(a), getmetatable b - depth -= 1 - return res - - return recurse a, b, aType, bType - - - --- Compares equality of two specified tables ignoring table keys. - -- The table comparison works much in the same way as @{UnitTest:equals}, - -- however this method doesn't require table keys to be equal between a and b - -- and considers two tables to be equal if an equal value is found in b for every value in a and vice versa. 
- -- By default this only looks at numerical indexes - -- as this kind of comparison doesn't usually make much sense for hashtables. - -- @static - -- @tparam table a the first table - -- @tparam table b the second table - -- @tparam[opt=true] bool onlyNumericalKeys Disable this option to also compare items with non-numerical keys - -- at the expense of a performance hit. - -- @tparam[opt=false] bool ignoreExtraAItems Enable this option to make the comparison one-sided, - -- ignoring additional items present in a but not in b. - -- @tparam[opt=false] bool requireIdenticalItems Enable this option if you require table items to be identical, - -- i.e. compared by reference, rather than by equality. - itemsEqual: (a, b, onlyNumKeys = true, ignoreExtraAItems, requireIdenticalItems) -> - seen, aTbls = {}, {} - aCnt, aTblCnt, bCnt = 0, 0, 0 - - findEqualTable = (bTbl) -> - for i, aTbl in ipairs aTbls - if UnitTest.equals aTbl, bTbl - table.remove aTbls, i - seen[aTbl] = nil - return true - return false - - if onlyNumKeys - aCnt, bCnt = #a, #b - return false if not ignoreExtraAItems and aCnt != bCnt - - for v in *a - seen[v] = true - if "table" == type v - aTblCnt += 1 - aTbls[aTblCnt] = v - - for v in *b - -- identical values - if seen[v] - seen[v] = nil - continue - - -- equal values - if type(v) != "table" or requireIdenticalItems or not findEqualTable v - return false - - - else - for _, v in pairs a - aCnt += 1 - seen[v] = true - if "table" == type v - aTblCnt += 1 - aTbls[aTblCnt] = v - - for _, v in pairs b - bCnt += 1 - -- identical values - if seen[v] - seen[v] = nil - continue - - -- equal values - if type(v) != "table" or requireIdenticalItems or not findEqualTable v - return false - - return false if not ignoreExtraAItems and aCnt != bCnt - - return true - - --- Helper method to mark a test as failed by assertion and throw a specified error message. - -- @local - -- @param condition passing in a falsy value causes the assertion to fail - -- @tparam string message error message (may contain format string templates) - -- @param[opt] args any arguments required for formatting the message - assert: (condition, ...) => - args = table.pack ... - msg = table.remove args, 1 - unless condition - @assertFailed = true - @logger\logEx 1, msg, nil, nil, 0, unpack args - - - -- type assertions - - --- Fails the assertion if the specified value didn't have the expected type. 
- -- @param value the value to be type-checked - -- @tparam string expectedType the expected type - assertType: (val, expected) => - @checkArgTypes val: {val, "_any"}, expected: {expected, "string"} - actual = type val - @assert actual == expected, @@msgs.assert.type, expected, actual - - --- Fails the assertion if the types of the actual and expected value didn't match - -- @param actual the actual value - -- @param expected the expected value - assertSameType: (actual, expected) => - actualType, expectedType = type(actual), type expected - @assert actualType == expectedType, @@msgs.assert.sameType, expectedType, actualType - - --- Fails the assertion if the specified value isn't a boolean - -- @param value the value expected to be a boolean - assertBoolean: (val) => @assertType val, "boolean" - --- Shorthand for @{UnitTest:assertBoolean} - assertBool: (val) => @assertType val, "boolean" - - --- Fails the assertion if the specified value isn't a function - -- @param value the value expected to be a function - assertFunction: (val) => @assertType val, "function" - - --- Fails the assertion if the specified value isn't a number - -- @param value the value expected to be a number - assertNumber: (val) => @assertType val, "number" - - --- Fails the assertion if the specified value isn't a string - -- @param value the value expected to be a string - assertString: (val) => @assertType val, "string" - - --- Fails the assertion if the specified value isn't a table - -- @param value the value expected to be a table - assertTable: (val) => @assertType val, "table" - - --- Helper method to type-check arguments as a prerequisite to other asserts. - -- @local - -- @tparam {[string]={value, string}} args a hashtable of argument values and expected types - -- indexed by the respective argument names - checkArgTypes: (args) => - i, expected, actual = 1 - for name, types in pairs args - actual, expected = types[2], type types[1] - continue if expected == "_any" - @logger\assert actual == expected, @@msgs.assert.checkArgTypes, i, name, - expected, @format "type", types[1] - i += 1 - - - -- boolean asserts - - --- Fails the assertion if the specified value isn't the boolean `true`. - -- @param value the value expected to be `true` - assertTrue: (val) => - @assert val == true, @@msgs.assert.true, @format "type", val - - --- Fails the assertion if the specified value doesn't evaluate to boolean `true`. - -- In Lua this is only ever the case for `nil` and boolean `false`. - -- @param value the value expected to be truthy - assertTruthy: (val) => - @assert val, @@msgs.assert.truthy, @format "type", val - - --- Fails the assertion if the specified value isn't the boolean `false`. - -- @param value the value expected to be `false` - assertFalse: (val) => - @assert val == false, @@msgs.assert.false, @format "type", val - - --- Fails the assertion if the specified value doesn't evaluate to boolean `false`. - -- In Lua `nil` is the only other value that evaluates to `false`. - -- @param value the value expected to be falsy - assertFalsy: (val) => - @assert not val, @@msgs.assert.falsy, @format "type", val - - --- Fails the assertion if the specified value is not `nil`. - -- @param value the value expected to be `nil` - assertNil: (val) => - @assert val == nil, @@msgs.assert.nil, @format "type", val - - --- Fails the assertion if the specified value is `nil`. 
- -- @param value the value expected to not be `nil` - assertNotNil: (val) => - @assert val != nil, @@msgs.assert.notNil, @format "type", val - - - -- numerical asserts - - --- Fails the assertion if a number is out of the specified range. - -- @tparam number actual the number expected to be in range - -- @tparam number min the minimum (inclusive) value - -- @tparam number max the maximum (inclusive) value - assertInRange: (actual, min = -math.huge, max = math.huge) => - @checkArgTypes actual: {actual, "number"}, min: {min, "number"}, max: {max, "number"} - @assert actual >= min, @@msgs.assert.inRange, min, max, actual, "<", min - @assert actual <= max, @@msgs.assert.inRange, min, max, actual, ">", max - - --- Fails the assertion if a number is not lower than the specified value. - -- @tparam number actual the number to compare - -- @tparam number limit the lower limit (exclusive) - assertLessThan: (actual, limit) => - @checkArgTypes actual: {actual, "number"}, limit: {limit, "number"} - @assert actual < max, @@msgs.assert.compare, "<", limit, actual - - --- Fails the assertion if a number is not lower than or equal to the specified value. - -- @tparam number actual the number to compare - -- @tparam number limit the lower limit (inclusive) - assertLessThanOrEquals: (actual, limit) => - @checkArgTypes actual: {actual, "number"}, limit: {limit, "number"} - @assert actual <= max, @@msgs.assert.compare, "<=", limit, actual - - --- Fails the assertion if a number is not greater than the specified value. - -- @tparam number actual the number to compare - -- @tparam number limit the upper limit (exclusive) - assertGreaterThan: (actual, limit) => - @checkArgTypes actual: {actual, "number"}, limit: {limit, "number"} - @assert actual > max, @@msgs.assert.compare, ">", limit, actual - - --- Fails the assertion if a number is not greater than or equal to the specified value. - -- @tparam number actual the number to compare - -- @tparam number limit the upper limit (inclusive) - assertGreaterThanOrEquals: (actual, limit) => - @checkArgTypes actual: {actual, "number"}, limit: {limit, "number"} - @assert actual >= max, @@msgs.assert.compare, ">=", limit, actual - - --- Fails the assertion if a number is not in range of an expected value +/- a specified margin. - -- @tparam number actual the actual value - -- @tparam number expected the expected value - -- @tparam[opt=1e-8] number margin the maximum (inclusive) acceptable margin of error - assertAlmostEquals: (actual, expected, margin = 1e-8) => - @checkArgTypes actual: {actual, "number"}, min: {expected, "number"}, max: {margin, "number"} - - margin = math.abs margin - @assert math.abs(actual-expected) <= margin, @@msgs.assert.almostEquals, - expected, margin, actual - - --- Fails the assertion if a number differs from another value at most by a specified margin. - -- Inverse of @{assertAlmostEquals} - -- @tparam number actual the actual value - -- @tparam number value the value being compared against - -- @tparam[opt=1e-8] number margin the maximum (inclusive) margin of error for the numbers to be considered equal - assertNotAlmostEquals: (actual, value, margin = 1e-8) => - @checkArgTypes actual: {actual, "number"}, value: {value, "number"}, max: {margin, "number"} - - margin = math.abs margin - @assert math.abs(actual-expected) > margin, @@msgs.assert.almostEquals, - expected, margin, actual - - --- Fails the assertion if a number is not equal to 0 (zero). 
- -- @tparam number actual the value - assertZero: (actual) => - @checkArgTypes actual: {actual, "number"} - @assert actual == 0, @@msgs.assert.zero, actual - - --- Fails the assertion if a number is equal to 0 (zero). - -- Inverse of @{assertZero} - -- @tparam number actual the value - assertNotZero: (actual) => - @checkArgTypes actual: {actual, "number"} - @assert actual != 0, @@msgs.assert.notZero - - --- Fails the assertion if a specified number has a fractional component. - -- All numbers in Lua share a common data type, which is usually a double, - -- which is the reason this is not a type check. - -- @tparam number actual the value - assertInteger: (actual) => - @checkArgTypes actual: {actual, "number"} - @assert math.floor(actual) == actual, @@msgs.assert.integer, actual - - --- Fails the assertion if a specified number is less than or equal 0. - -- @tparam number actual the value - -- @tparam[opt=false] boolean includeZero makes the assertion consider 0 to be positive - assertPositive: (actual, includeZero = false) => - @checkArgTypes actual: {actual, "number"}, includeZero: {includeZero, "boolean"} - res = includeZero and actual >= 0 or actual > 0 - @assert res, @@msgs.assert.positiveNegative, "positive", - includeZero and "included" or "excluded" - - --- Fails the assertion if a specified number is greater than or equal 0. - -- @tparam number actual the value - -- @tparam[opt=false] boolean includeZero makes the assertion not fail when a 0 is encountered - assertNegative: (actual, includeZero = false) => - @checkArgTypes actual: {actual, "number"}, includeZero: {includeZero, "boolean"} - res = includeZero and actual <= 0 or actual < 0 - @assert res, @@msgs.assert.positiveNegative, "positive", - includeZero and "included" or "excluded" - - - -- generic asserts - - --- Fails the assertion if a the actual value is not *equal* to the expected value. - -- On the requirements for equality see @{UnitTest:equals} - -- @param actual the actual value - -- @param expected the expected value - assertEquals: (actual, expected) => - @assert self.equals(actual, expected), @@msgs.assert.equals, type(actual), - @logger\dumpToString(actual), type(expected), @logger\dumpToString expected - - --- Fails the assertion if a the actual value is *equal* to the expected value. - -- Inverse of @{UnitTest:assertEquals} - -- @param actual the actual value - -- @param expected the expected value - assertNotEquals: (actual, expected) => - @assert not self.equals(actual, expected), @@msgs.assert.notEquals, - type(actual), @logger\dumpToString expected - - --- Fails the assertion if a the actual value is not *identical* to the expected value. - -- Uses the `==` operator, so in contrast to @{UnitTest:assertEquals}, - -- this assertion compares tables by reference. - -- @param actual the actual value - -- @param expected the expected value - assertIs: (actual, expected) => - @assert actual == expected, @@msgs.assert.is, @format("type", expected), - @format "type", actual - - --- Fails the assertion if a the actual value is *identical* to the expected value. - -- Inverse of @{UnitTest:assertIs} - -- @param actual the actual value - -- @param expected the expected value - assertIsNot: (actual, expected) => - @assert actual != expected, @@msgs.assert.isNot, @format "type", expected - - - -- table asserts - - --- Fails the assertion if the items of one table aren't *equal* to the items of another. - -- Unlike @{UnitTest:assertEquals} this ignores table keys, so e.g. 
two numerically-keyed tables - -- with equal items in a different order would still be considered equal. - -- By default this assertion only compares values at numerical indexes (see @{UnitTest:itemsEqual} for details). - -- @tparam table actual the first table - -- @tparam table expected the second table - -- @tparam[opt=true] boolean onlyNumericalKeys Disable this option to also compare items with non-numerical keys at the expense of a performance hit. - assertItemsEqual: (actual, expected, onlyNumKeys = true) => - @checkArgTypes { actual: {actual, "table"}, expected: {actual, "table"}, - onlyNumKeys: {onlyNumKeys, "boolean"} - } - - @assert self.itemsEqual(actual, expected, onlyNumKeys), - msgs.assert[onlyNumKeys and "itemsEqualNumericKeys" or "itemsEqualAllKeys"], - @logger\dumpToString(actual), @logger\dumpToString expected - - - --- Fails the assertion if the items of one table aren't *identical* to the items of another. - -- Like @{UnitTest:assertItemsEqual} this ignores table keys, however it compares table items by reference. - -- By default this assertion only compares values at numerical indexes (see @{UnitTest:itemsEqual} for details). - -- @tparam table actual the first table - -- @tparam table expected the second table - -- @tparam[opt=true] boolean onlyNumericalKeys Disable this option to also compare items with non-numerical keys - assertItemsAre: (actual, expected, onlyNumKeys = true) => - @checkArgTypes { actual: {actual, "table"}, expected: {actual, "table"}, - onlyNumKeys: {onlyNumKeys, "boolean"} - } - - @assert self.itemsEqual(actual, expected, onlyNumKeys, nil, true), - msgs.assert[onlyNumKeys and "itemsEqualNumericKeys" or "itemsEqualAllKeys"], - @logger\dumpToString(actual), @logger\dumpToString expected - - --- Fails the assertion if the numerically-keyed items of a table aren't continuous. - -- The rationale for this is that when iterating a table with ipairs or retrieving its length - -- with the # operator, Lua may stop processing the table once the item at index n is nil, - -- effectively hiding any subsequent values - -- @tparam table tbl the table to be checked - assertContinuous: (tbl) => - @checkArgTypes { tbl: {tbl, "table"} } - - realCnt, contCnt = 0, #tbl - for _, v in pairs tbl - if type(v) == "number" and math.floor(v) == v - realCnt += 1 - - @assert realCnt == contCnt, msgs.assert.continuous, contCnt+1, realCnt - - -- string asserts - - --- Fails the assertion if a string doesn't match the specified pattern. - -- Supports both Lua and Regex patterns. - -- @tparam string str the input string - -- @tparam string pattern the pattern to be matched against - -- @tparam[opt=false] boolean useRegex Enable this option to use Regex instead of Lua patterns - -- @tparam[optchain] re.Flags flags Any amount of regex flags as defined by the Aegisub re module - -- (see here for details: http://docs.aegisub.org/latest/Automation/Lua/Modules/re/#flags) - assertMatches: (str, pattern, useRegex = false, ...) => - @checkArgTypes { str: {str, "string"}, pattern: {pattern, "string"}, - useRegex: {useRegex, "boolean"} - } - - match = useRegex and re.match(str, pattern, ...) or str\match pattern, ... - @assert msgs.assert.matches, str, useRegex and "regex" or "Lua", pattern - - --- Fails the assertion if a string doesn't contain a specified substring. - -- Search is case-sensitive by default. 
- -- @tparam string str the input string - -- @tparam string needle the substring to be found - -- @tparam[opt=true] boolean caseSensitive Disable this option to use locale-dependent case-insensitive comparison. - -- @tparam[opt=1] number init the first byte to start the search at - assertContains: (str, needle, caseSensitive = true, init = 1) => - @checkArgTypes { str: {str, "string"}, needle: {needle, "string"}, - caseSensitive: {caseSensitive, "boolean"}, init: {init, "number"} - } - - _str, _needle = if caseSensitive - str\lower!, needle\lower! - else str, needle - @assert str\find(needle, init, true), str, needle, - caseSensitive and "sensitive" or "insensitive" - - -- function asserts - - - --- Fails the assertion if calling a function with the specified arguments doesn't cause it throw an error. - -- @tparam function func the function to be called - -- @param[opt] args any number of arguments to be passed into the function - assertError: (func, ...) => - @checkArgTypes { func: {func, "function"} } - - res = table.pack pcall func, ... - retCnt, success = res.n, table.remove res, 1 - res.n = nil - @assert success == false, msgs.assert.error, retCnt, @logger\dumpToString res - return res[1] - - --- Fails the assertion if a function call doesn't cause an error message that matches the specified pattern. - -- Supports both Lua and Regex patterns. - -- @tparam function func the function to be called - -- @tparam[opt={}] table args a table of any number of arguments to be passed into the function - -- @tparam string pattern the pattern to be matched against - -- @tparam[opt=false] boolean useRegex Enable this option to use Regex instead of Lua patterns - -- @tparam[optchain] re.Flags flags Any amount of regex flags as defined by the Aegisub re module - -- (see here for details: http://docs.aegisub.org/latest/Automation/Lua/Modules/re/#flags) - assertErrorMsgMatches: (func, params = {}, pattern, useRegex = false, ...) => - @checkArgTypes { func: {func, "function"}, params: {params, "table"}, - pattern: {pattern, "string"}, useRegex: {useRegex, "boolean"} - } - msg = @assertError func, unpack params - - match = useRegex and re.match(msg, pattern, ...) or msg\match pattern, ... - @assert msgs.assert.errorMsgMatches, msg, useRegex and "regex" or "Lua", pattern - - ---- A special case of the UnitTest class for a setup routine --- @classmod UnitTestSetup -class UnitTestSetup extends UnitTest - --- Runs the setup routine. - -- Only the @{UnitTestSetup} object is passed into the function. - -- Values returned by the setup routine are stored to be passed into the test functions later. - -- @treturn[1] boolean true (test succeeded) - -- @treturn[1] table retVals all values returned by the function packed into a table - -- @treturn[2] boolean false (test failed) - -- @treturn[2] string the error message describing how the test failed - run: => - @logger\logEx nil, @@msgs.run.setup, false - - res = table.pack pcall @f, @ - @success = table.remove res, 1 - @logResult res[1] - - if @success - @retVals = res - return true, @retVals - - return false, @errMsg - ---- A special case of the UnitTest class for a teardown routine --- @classmod UnitTestTeardown -class UnitTestTeardown extends UnitTest - --- Formats and writes a "running test x" message to the log. - -- @local - logStart: => - @logger\logEx nil, @@msgs.run.teardown, false - - ---- Holds a unit test class, i.e. 
a group of unit tests with common setup and teardown routines --- @classmod UnitTestClass -class UnitTestClass - msgs = { - run: { - runningTests: "Running test class '%s' (%d tests)..." - setupFailed: "Setup for test class '%s' FAILED, skipping tests." - abort: "Test class '%s' FAILED after %d tests, aborting." - testsFailed: "Done testing class '%s'. FAILED %d of %d tests." - success: "Test class '%s' completed successfully." - testNotFound: "Couldn't find requested test '%s'." - } - } - - --- Creates a new unit test class complete with a number of unit test as well as optional setup and teardown. - -- Instead of calling this constructor directly, it is recommended to call @{UnitTestSuite:new} instead, - -- which takes a table of test functions and creates test classes automatically. - -- @tparam string name a descriptive name for the test class - -- @tparam[opt={}] {[string] = function|table, ...} args a table of test functions by name; - -- indexes starting with "_" have special meaning and are not added as regular tests: - -- * _setup: a @{UnitTestSetup} routine - -- * _teardown: a @{UnitTestTeardown} routine - -- * _order: alternative syntax to the order parameter (see below) - -- @tparam [opt=nil (unordered)] {string, ...} An list of test names in the desired execution order. - -- Only tests mentioned in this table will be performed when running the whole test class. - -- If unspecified, all tests will be run in random order. - new: (@name, args = {}, @order, @testSuite) => - @logger = @testSuite.logger - @setup = UnitTestSetup "setup", args._setup, @ - @teardown = UnitTestTeardown "teardown", args._teardown, @ - @description = args._description - @order or= args._order - @tests = [UnitTest(name, f, @) for name, f in pairs args when "_" != name\sub 1,1] - - --- Runs all tests in the unit test class in the specified order. - -- @param[opt=false] abortOnFail stops testing once a test fails - -- @param[opt=(default)] overrides the default test order - -- @treturn[1] boolean true (test class succeeded) - -- @treturn[2] boolean false (test class failed) - -- @treturn[2] {@{UnitTest}, ...} a list of unit test that failed - run: (abortOnFail, order = @order) => - tests, failed = @tests, {} - if order - tests, mappings = {}, {test.name, test for test in *@tests} - for i, name in ipairs order - @logger\assert mappings[name], msgs.run.testNotFound, name - tests[i] = mappings[name] - testCnt, failedCnt = #tests, 0 - - @logger\log msgs.run.runningTests, @name, testCnt - @logger.indent += 1 - - success, res = @setup\run! - -- failing the setup always aborts - unless success - @logger.indent -= 1 - @logger\warn msgs.run.setupFailed, @name - return false, -1 - - for i, test in pairs tests - unless test\run unpack res - failedCnt += 1 - failed[#failed+1] = test - if abortOnFail - @logger.indent -= 1 - @logger\warn msgs.run.abort, @name, i - return false, failed - - @logger.indent -= 1 - @success = failedCnt == 0 - - if @success - @logger\log msgs.run.success, @name - return true - - @logger\log msgs.run.testsFailed, @name, failedCnt, testCnt - return false, failed - - ---- A DependencyControl unit test suite. --- Your test file/module must reteturn a UnitTestSuite object in order to be recognized as a test suite. -class UnitTestSuite - msgs = { - run: { - running: "Running %d test classes for %s... " - aborted: "Aborting after %d test classes... " - classesFailed: "FAILED %d of %d test classes." - success: "All tests completed successfully." 
- classNotFound: "Couldn't find requested test class '%s'." - } - registerMacros: { - allDesc: "Runs the whole test suite." - } - new: { - badClassesType: "Test classes must be passed in either as a table or an import function, got a %s" - } - import: { - noTableReturned: "The test import function must return a table of test classes, got a %s." - } - } - - @UnitTest = UnitTest - @UnitTestClass = UnitTestClass - @testDir = {macros: aegisub.decode_path("?user/automation/tests/DepUnit/macros"), - modules: aegisub.decode_path("?user/automation/tests/DepUnit/modules")} - - --- Creates a complete unit test suite for a module or automation script. - -- Using this constructor will create all test classes and tests automatically. - -- @tparam string namespace the namespace of the module or automation script to test. - -- @tparam {[string] = table, ...}|function(self, dependencies, args...) args To create a UnitTest suite, - -- you must supply a hashtable of @{UnitTestClass} constructor tables by name. You can either do so directly, - -- or wrap it in a function that takes a number of arguments depending on how the tests are registered: - -- * self: the module being testsed (skipped for automation scripts) - -- * dependencies: a numerically keyed table of all the modules required by the tested script/module (in order) - -- * args: any additional arguments passed into the @{DependencyControl\registerTests} function. - -- Doing so is required to test automation scripts as well as module functions not exposed by its API. - -- indexes starting with "_" have special meaning and are not added as regular tests: - -- * _order: alternative syntax to the order parameter (see below) - -- @tparam [opt=nil (unordered)] {string, ...} An list of test class names in the desired execution order. - -- Only test classes mentioned in this table will be performed when running the whole test suite. - -- If unspecified, all test classes will be run in random order. - new: (@namespace, classes, @order) => - @logger = Logger defaultLevel: 3, fileBaseName: @namespace, fileSubName: "UnitTests", toFile: true - @classes = {} - switch type classes - when "table" then @addClasses classes - when "function" then @importFunc = classes - else @logger\error msgs.new.badClassesType, type classes - - --- Constructs test classes and adds them to the suite. - -- Use this if you need to add additional test classes to an existing @{UnitTestSuite} object. - -- @tparam {[string] = table, ...} args a hashtable of @{UnitTestClass} constructor tables by name. - addClasses: (classes) => - @classes[#@classes+1] = UnitTestClass(name, args, args._order, @) for name, args in pairs classes when "_" != name\sub 1,1 - if classes._order - @order or= {} - @order[#@order+1] = clsName for clsName in *classes._order - - --- Imports test classes from a function (passing in the specified arguments) and adds them to the suite. - -- Use this if you need to add additional test classes to an existing @{UnitTestSuite} object. - -- @tparam [opt] args a hashtable of @{UnitTestClass} constructor tables by name. - import: (...) => - return false unless @importFunc - classes = self.importFunc ... - @logger\assert type(classes) == "table", msgs.import.noTableReturned, type classes - @addClasses classes - @importFunc = nil - - --- Registers macros for running all or specific test classes of this suite. - -- If the test script is placed in the appropriate directory (according to module/automation script namespace), - -- this is automatically handled by DependencyControl. 
- registerMacros: => - menuItem = {"DependencyControl", "Run Tests", @name or @namespace, "[All]"} - aegisub.register_macro table.concat(menuItem, "/"), msgs.registerMacros.allDesc, -> @run! - for cls in *@classes - menuItem[4] = cls.name - aegisub.register_macro table.concat(menuItem, "/"), cls.description, -> cls\run! - - --- Runs all test classes of this suite in the specified order. - -- @param[opt=false] abortOnFail stops testing once a test fails - -- @param[opt=(default)] overrides the default test order - -- @treturn[1] boolean true (test class succeeded) - -- @treturn[2] boolean false (test class failed) - -- @treturn[2] {@{UnitTest}, ...} a list of unit test that failed - run: (abortOnFail, order = @order) => - classes, allFailed = @classes, {} - if order - classes, mappings = {}, {cls.name, cls for cls in *@classes} - for i, name in ipairs order - @logger\assert mappings[name], msgs.run.classNotFound, name - classes[i] = mappings[name] - - classCnt, failedCnt = #classes, 0 - @logger\log msgs.run.running, classCnt, @namespace - @logger.indent += 1 - - for i, cls in pairs classes - success, failed = cls\run abortOnFail - unless success - failedCnt += 1 - allFailed[#allFailed+1] = test for test in *failed - if abortOnFail - @logger.indent -= 1 - @logger\warn msgs.run.abort, i - return false, allFailed - - @logger.indent -= 1 - @success = failedCnt == 0 - if @success - @logger\log msgs.run.success - else @logger\log msgs.run.classesFailed, failedCnt, classCnt - + +Logger = require "l0.DependencyControl.Logger" +re = require "aegisub.re" +-- make sure tests can be loaded from the test directory +package.path ..= aegisub.decode_path("?user/automation/tests") .. "/?.lua;" + +--- A class for all single unit tests. +-- Provides useful assertion and logging methods for a user-specified test function. +-- @classmod UnitTest +class UnitTest + @msgs = { + run: { + setup: "Performing setup... " + teardown: "Performing teardown... " + test: "Running test '%s'... " + ok: "OK." + failed: "FAILED!" + reason: "Reason: %s" + } + new: { + badTestName: "Test name must be of type %s, got a %s." + } + + assert: { + true: "Expected true, actual value was %s." + false: "Expected false, actual value was %s." + nil: "Expected nil, actual value was %s." + notNil: "Got nil when a value was expected." + truthy: "Expected a truthy value, actual value was falsy (%s)." + falsy: "Expected a falsy value, actual value was truthy (%s)." + type: "Expected a value of type %s, actual value was of type %s." + sameType: "Type of expected value (%s) didn't match type of actual value (%s)." + inRange: "Expected value to be in range [%d .. %d], actual value %d was %s %d." + almostEquals: "Expected value to be almost equal %d ± %d, actual value was %d." + notAlmostEquals: "Expected numerical value to not be close to %d ± %d, actual value was %d." + checkArgTypes: "Expected argument #%d (%s) to be of type %s, got a %s." + zero: "Expected 0, actual value was a %s." + notZero: "Got a 0 when a number other than 0 was expected." + compare: "Expected value to be a number %s %d, actual value was %d." + integer: "Expected numerical value to be an integer, actual value was %d." + positiveNegative: "Expected a %s number (0 %s), actual value was %d." + equals: "Actual value didn't match expected value.\n%s actual: %s\n%s expected: %s" + notEquals: "Actual value equals expected value when it wasn't supposed to:\n%s actual: %s" + is: "Expected %s, actual value was %s." 
+ isNot: "Actual value %s was identical to the expected value when it wasn't supposed to." + itemsEqual: "Actual item values of table weren't %s to the expected values (checked %s):\n Actual: %s\nExpected: %s" + itemsEqualNumericKeys: "only continuous numerical keys" + itemsEqualAllKeys: "all keys" + continuous: "Expected table to have continuous numerical keys, but value at index %d of %d was a nil." + matches: "String value '%s' didn't match expected %s pattern '%s'." + contains: "String value '%s' didn't contain expected substring '%s' (case-%s comparison)." + error: "Expected function to throw an error but it succesfully returned %d values: %s" + errorMsgMatches: "Error message '%s' didn't match expected %s pattern '%s'." + } + + formatTemplate: { + type: "'%s' of type %s" + } + } + + --- Creates a single unit test. + -- Instead of calling this constructor you'd usually provide test data + -- in a table structure to @{UnitTestSuite:new} as an argument. + -- @tparam string name a descriptive title for the test + -- @tparam function(UnitTest, ...) testFunc the function containing the test code + -- @tparam UnitTestClass testClass the test class this test belongs to + -- @treturn UnitTest the unit test + -- @see UnitTestSuite:new + new: (@name, @f = -> , @testClass) => + @logger = @testClass.logger + error type(@logger) unless type(@logger) == "table" + @logger\assert type(@name) == "string", @@msgs.new.badTestName, type @name + + --- Runs the unit test function. + -- In addition to the @{UnitTest} object itself, it also passes + -- the specified arguments into the function. + -- @param[opt] args any optional modules or other data the test function needs + -- @treturn[1] boolean true (test succeeded) + -- @treturn[2] boolean false (test failed) + -- @treturn[2] string the error message describing how the test failed + run: (...) => + @assertFailed = false + @logStart! + @success, res = xpcall @f, debug.traceback, @, ... + @logResult res + + return @success, @errMsg + + --- Formats and writes a "running test x" message to the log. + -- @local + logStart: => + @logger\logEx nil, @@msgs.run.test, false, nil, nil, @name + + --- Formats and writes the test result to the log. + -- In case of failure the message contains details about either the test assertion that failed + -- or a stack trace if the test ran into a different exception. + -- @local + -- @tparam[opt=errMsg] the error message being logged; defaults to the error returned by the last run of this test + logResult: (errMsg = @errMsg) => + if @success + @logger\logEx nil, @@msgs.run.ok, nil, nil, 0 + else + if @assertFailed + -- scrub useless stack trace from asserts provided by this module + errMsg = errMsg\gsub "%[%w+ \".-\"%]:%d+:", "" + errMsg = errMsg\gsub "stack traceback:.*", "" + @errMsg = errMsg + @logger\logEx nil, @@msgs.run.failed, nil, nil, 0 + @logger.indent += 1 + @logger\log @@msgs.run.reason, @errMsg + @logger.indent -= 1 + + --- Formats a message with a specified predefined template. + -- Currently only supports the "type" template. + -- @local + -- @tparam string template the name of the template to use + -- @param[opt] args any arguments required for formatting the message + format: (tmpl, ...) => + inArgs = table.pack ... 
+ outArgs = switch tmpl + when "type" then {tostring(inArgs[1]), type(inArgs[1])} + + @@msgs.formatTemplate[tmpl]\format unpack outArgs + + + -- static helper functions + + --- Compares equality of two specified arguments + -- Requirements for values are considered equal: + -- [1] their types match + -- [2] their metatables are equal + -- [3] strings and numbers are compared by value + -- functions and cdata are compared by reference + -- tables must have equal values at identical indexes and are compared recursively + -- (i.e. two table copies of `{"a", {"b"}}` are considered equal) + -- @static + -- @param a the first value + -- @param b the second value + -- @tparam[opt] string aType if already known, specify the type of the first value + -- for a small performance benefit + -- @tparam[opt] string bType the type of the second value + -- @treturn boolean `true` if a and b are equal, otherwise `false` + equals: (a, b, aType, bType) -> + -- TODO: support equality comparison of tables used as keys + treeA, treeB, depth = {}, {}, 0 + + recurse = (a, b, aType = type a, bType) -> + -- identical values are equal + return true if a == b + -- only tables can be equal without also being identical + bType or= type b + return false if aType != bType or aType != "table" + + -- perform table equality comparison + return false if #a != #b + + aFieldCnt, bFieldCnt = 0, 0 + local tablesSeenAtKeys + + depth += 1 + treeA[depth], treeB[depth] = a, b + + for k, v in pairs a + vType = type v + if vType == "table" + -- comparing tables is expensive so we should keep a list + -- of keys we can skip checking when iterating table b + tablesSeenAtKeys or= {} + tablesSeenAtKeys[k] = true + + -- detect synchronous circular references to prevent infinite recursion loops + for i = 1, depth + return true if v == treeA[i] and b[k] == treeB[i] + + unless recurse v, b[k], vType + depth -= 1 + return false + + aFieldCnt += 1 + + for k, v in pairs b + continue if tablesSeenAtKeys and tablesSeenAtKeys[k] + if bFieldCnt == aFieldCnt or not recurse v, a[k] + -- no need to check further if the field count is not identical + depth -= 1 + return false + bFieldCnt += 1 + + -- check metatables for equality + res = recurse getmetatable(a), getmetatable b + depth -= 1 + return res + + return recurse a, b, aType, bType + + + --- Compares equality of two specified tables ignoring table keys. + -- The table comparison works much in the same way as @{UnitTest:equals}, + -- however this method doesn't require table keys to be equal between a and b + -- and considers two tables to be equal if an equal value is found in b for every value in a and vice versa. + -- By default this only looks at numerical indexes + -- as this kind of comparison doesn't usually make much sense for hashtables. + -- @static + -- @tparam table a the first table + -- @tparam table b the second table + -- @tparam[opt=true] bool onlyNumericalKeys Disable this option to also compare items with non-numerical keys + -- at the expense of a performance hit. + -- @tparam[opt=false] bool ignoreExtraAItems Enable this option to make the comparison one-sided, + -- ignoring additional items present in a but not in b. + -- @tparam[opt=false] bool requireIdenticalItems Enable this option if you require table items to be identical, + -- i.e. compared by reference, rather than by equality. 
+ itemsEqual: (a, b, onlyNumKeys = true, ignoreExtraAItems, requireIdenticalItems) -> + seen, aTbls = {}, {} + aCnt, aTblCnt, bCnt = 0, 0, 0 + + findEqualTable = (bTbl) -> + for i, aTbl in ipairs aTbls + if UnitTest.equals aTbl, bTbl + table.remove aTbls, i + seen[aTbl] = nil + return true + return false + + if onlyNumKeys + aCnt, bCnt = #a, #b + return false if not ignoreExtraAItems and aCnt != bCnt + + for v in *a + seen[v] = true + if "table" == type v + aTblCnt += 1 + aTbls[aTblCnt] = v + + for v in *b + -- identical values + if seen[v] + seen[v] = nil + continue + + -- equal values + if type(v) != "table" or requireIdenticalItems or not findEqualTable v + return false + + + else + for _, v in pairs a + aCnt += 1 + seen[v] = true + if "table" == type v + aTblCnt += 1 + aTbls[aTblCnt] = v + + for _, v in pairs b + bCnt += 1 + -- identical values + if seen[v] + seen[v] = nil + continue + + -- equal values + if type(v) != "table" or requireIdenticalItems or not findEqualTable v + return false + + return false if not ignoreExtraAItems and aCnt != bCnt + + return true + + --- Helper method to mark a test as failed by assertion and throw a specified error message. + -- @local + -- @param condition passing in a falsy value causes the assertion to fail + -- @tparam string message error message (may contain format string templates) + -- @param[opt] args any arguments required for formatting the message + assert: (condition, ...) => + args = table.pack ... + msg = table.remove args, 1 + unless condition + @assertFailed = true + @logger\logEx 1, msg, nil, nil, 0, unpack args + + + -- type assertions + + --- Fails the assertion if the specified value didn't have the expected type. + -- @param value the value to be type-checked + -- @tparam string expectedType the expected type + assertType: (val, expected) => + @checkArgTypes val: {val, "_any"}, expected: {expected, "string"} + actual = type val + @assert actual == expected, @@msgs.assert.type, expected, actual + + --- Fails the assertion if the types of the actual and expected value didn't match + -- @param actual the actual value + -- @param expected the expected value + assertSameType: (actual, expected) => + actualType, expectedType = type(actual), type expected + @assert actualType == expectedType, @@msgs.assert.sameType, expectedType, actualType + + --- Fails the assertion if the specified value isn't a boolean + -- @param value the value expected to be a boolean + assertBoolean: (val) => @assertType val, "boolean" + --- Shorthand for @{UnitTest:assertBoolean} + assertBool: (val) => @assertType val, "boolean" + + --- Fails the assertion if the specified value isn't a function + -- @param value the value expected to be a function + assertFunction: (val) => @assertType val, "function" + + --- Fails the assertion if the specified value isn't a number + -- @param value the value expected to be a number + assertNumber: (val) => @assertType val, "number" + + --- Fails the assertion if the specified value isn't a string + -- @param value the value expected to be a string + assertString: (val) => @assertType val, "string" + + --- Fails the assertion if the specified value isn't a table + -- @param value the value expected to be a table + assertTable: (val) => @assertType val, "table" + + --- Helper method to type-check arguments as a prerequisite to other asserts. 
+ -- @local
+ -- @tparam {[string]={value, string}} args a hashtable of argument values and expected types
+ -- indexed by the respective argument names
+ checkArgTypes: (args) =>
+ i, expected, actual = 1
+ for name, types in pairs args
+ actual, expected = types[2], type types[1]
+ continue if expected == "_any"
+ @logger\assert actual == expected, @@msgs.assert.checkArgTypes, i, name,
+ expected, @format "type", types[1]
+ i += 1
+
+
+ -- boolean asserts
+
+ --- Fails the assertion if the specified value isn't the boolean `true`.
+ -- @param value the value expected to be `true`
+ assertTrue: (val) =>
+ @assert val == true, @@msgs.assert.true, @format "type", val
+
+ --- Fails the assertion if the specified value doesn't evaluate to boolean `true`.
+ -- In Lua this is only ever the case for `nil` and boolean `false`.
+ -- @param value the value expected to be truthy
+ assertTruthy: (val) =>
+ @assert val, @@msgs.assert.truthy, @format "type", val
+
+ --- Fails the assertion if the specified value isn't the boolean `false`.
+ -- @param value the value expected to be `false`
+ assertFalse: (val) =>
+ @assert val == false, @@msgs.assert.false, @format "type", val
+
+ --- Fails the assertion if the specified value doesn't evaluate to boolean `false`.
+ -- In Lua `nil` is the only other value that evaluates to `false`.
+ -- @param value the value expected to be falsy
+ assertFalsy: (val) =>
+ @assert not val, @@msgs.assert.falsy, @format "type", val
+
+ --- Fails the assertion if the specified value is not `nil`.
+ -- @param value the value expected to be `nil`
+ assertNil: (val) =>
+ @assert val == nil, @@msgs.assert.nil, @format "type", val
+
+ --- Fails the assertion if the specified value is `nil`.
+ -- @param value the value expected to not be `nil`
+ assertNotNil: (val) =>
+ @assert val != nil, @@msgs.assert.notNil, @format "type", val
+
+
+ -- numerical asserts
+
+ --- Fails the assertion if a number is out of the specified range.
+ -- @tparam number actual the number expected to be in range
+ -- @tparam number min the minimum (inclusive) value
+ -- @tparam number max the maximum (inclusive) value
+ assertInRange: (actual, min = -math.huge, max = math.huge) =>
+ @checkArgTypes actual: {actual, "number"}, min: {min, "number"}, max: {max, "number"}
+ @assert actual >= min, @@msgs.assert.inRange, min, max, actual, "<", min
+ @assert actual <= max, @@msgs.assert.inRange, min, max, actual, ">", max
+
+ --- Fails the assertion if a number is not lower than the specified value.
+ -- @tparam number actual the number to compare
+ -- @tparam number limit the upper bound (exclusive)
+ assertLessThan: (actual, limit) =>
+ @checkArgTypes actual: {actual, "number"}, limit: {limit, "number"}
+ @assert actual < limit, @@msgs.assert.compare, "<", limit, actual
+
+ --- Fails the assertion if a number is not lower than or equal to the specified value.
+ -- @tparam number actual the number to compare
+ -- @tparam number limit the upper bound (inclusive)
+ assertLessThanOrEquals: (actual, limit) =>
+ @checkArgTypes actual: {actual, "number"}, limit: {limit, "number"}
+ @assert actual <= limit, @@msgs.assert.compare, "<=", limit, actual
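+
+ -- A minimal usage sketch of the boolean, nil and comparison asserts above; the test
+ -- runner passes the @{UnitTest} object as the first argument of every test function,
+ -- so the asserts are called on it (test and value names below are made up):
+ --
+ --   basics: (test) ->
+ --     test\assertTrue true
+ --     test\assertTruthy 0              -- passes: only nil and false are falsy in Lua
+ --     test\assertNil nil
+ --     test\assertNotNil {}
+ --     test\assertLessThan 3, 4         -- 3 < 4
+ --     test\assertLessThanOrEquals 4, 4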
+
+ --- Fails the assertion if a number is not greater than the specified value.
+ -- @tparam number actual the number to compare
+ -- @tparam number limit the lower bound (exclusive)
+ assertGreaterThan: (actual, limit) =>
+ @checkArgTypes actual: {actual, "number"}, limit: {limit, "number"}
+ @assert actual > limit, @@msgs.assert.compare, ">", limit, actual
+
+ --- Fails the assertion if a number is not greater than or equal to the specified value.
+ -- @tparam number actual the number to compare
+ -- @tparam number limit the lower bound (inclusive)
+ assertGreaterThanOrEquals: (actual, limit) =>
+ @checkArgTypes actual: {actual, "number"}, limit: {limit, "number"}
+ @assert actual >= limit, @@msgs.assert.compare, ">=", limit, actual
+
+ --- Fails the assertion if a number is not in range of an expected value +/- a specified margin.
+ -- @tparam number actual the actual value
+ -- @tparam number expected the expected value
+ -- @tparam[opt=1e-8] number margin the maximum (inclusive) acceptable margin of error
+ assertAlmostEquals: (actual, expected, margin = 1e-8) =>
+ @checkArgTypes actual: {actual, "number"}, expected: {expected, "number"}, margin: {margin, "number"}
+
+ margin = math.abs margin
+ @assert math.abs(actual-expected) <= margin, @@msgs.assert.almostEquals,
+ expected, margin, actual
+
+ --- Fails the assertion if a number differs from another value by no more than a specified margin.
+ -- Inverse of @{assertAlmostEquals}
+ -- @tparam number actual the actual value
+ -- @tparam number value the value being compared against
+ -- @tparam[opt=1e-8] number margin the maximum (inclusive) margin of error for the numbers to be considered equal
+ assertNotAlmostEquals: (actual, value, margin = 1e-8) =>
+ @checkArgTypes actual: {actual, "number"}, value: {value, "number"}, margin: {margin, "number"}
+
+ margin = math.abs margin
+ @assert math.abs(actual-value) > margin, @@msgs.assert.notAlmostEquals,
+ value, margin, actual
+
+ --- Fails the assertion if a number is not equal to 0 (zero).
+ -- @tparam number actual the value
+ assertZero: (actual) =>
+ @checkArgTypes actual: {actual, "number"}
+ @assert actual == 0, @@msgs.assert.zero, actual
+
+ --- Fails the assertion if a number is equal to 0 (zero).
+ -- Inverse of @{assertZero}
+ -- @tparam number actual the value
+ assertNotZero: (actual) =>
+ @checkArgTypes actual: {actual, "number"}
+ @assert actual != 0, @@msgs.assert.notZero
+
+ --- Fails the assertion if a specified number has a fractional component.
+ -- All numbers in Lua share a common data type, which is usually a double,
+ -- which is the reason this is not a type check.
+ -- @tparam number actual the value
+ assertInteger: (actual) =>
+ @checkArgTypes actual: {actual, "number"}
+ @assert math.floor(actual) == actual, @@msgs.assert.integer, actual
+
+ --- Fails the assertion if a specified number is less than or equal to 0.
+ -- @tparam number actual the value
+ -- @tparam[opt=false] boolean includeZero makes the assertion consider 0 to be positive
+ assertPositive: (actual, includeZero = false) =>
+ @checkArgTypes actual: {actual, "number"}, includeZero: {includeZero, "boolean"}
+ res = includeZero and actual >= 0 or actual > 0
+ @assert res, @@msgs.assert.positiveNegative, "positive",
+ includeZero and "included" or "excluded", actual
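+
+ -- Usage sketch for the numerical asserts above (values are illustrative only):
+ --
+ --   numbers: (test) ->
+ --     test\assertInRange 5, 1, 10                  -- 1 <= 5 <= 10
+ --     test\assertAlmostEquals 0.1 + 0.2, 0.3, 1e-9 -- floating point comparison with a margin
+ --     test\assertInteger 42
+ --     test\assertPositive 0, true                  -- passes only because includeZero is set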
+
+ --- Fails the assertion if a specified number is greater than or equal to 0.
+ -- @tparam number actual the value
+ -- @tparam[opt=false] boolean includeZero makes the assertion not fail when a 0 is encountered
+ assertNegative: (actual, includeZero = false) =>
+ @checkArgTypes actual: {actual, "number"}, includeZero: {includeZero, "boolean"}
+ res = includeZero and actual <= 0 or actual < 0
+ @assert res, @@msgs.assert.positiveNegative, "negative",
+ includeZero and "included" or "excluded", actual
+
+
+ -- generic asserts
+
+ --- Fails the assertion if the actual value is not *equal* to the expected value.
+ -- On the requirements for equality see @{UnitTest:equals}
+ -- @param actual the actual value
+ -- @param expected the expected value
+ assertEquals: (actual, expected) =>
+ @assert self.equals(actual, expected), @@msgs.assert.equals, type(actual),
+ @logger\dumpToString(actual), type(expected), @logger\dumpToString expected
+
+ --- Fails the assertion if the actual value is *equal* to the expected value.
+ -- Inverse of @{UnitTest:assertEquals}
+ -- @param actual the actual value
+ -- @param expected the expected value
+ assertNotEquals: (actual, expected) =>
+ @assert not self.equals(actual, expected), @@msgs.assert.notEquals,
+ type(actual), @logger\dumpToString actual
+
+ --- Fails the assertion if the actual value is not *identical* to the expected value.
+ -- Uses the `==` operator, so in contrast to @{UnitTest:assertEquals},
+ -- this assertion compares tables by reference.
+ -- @param actual the actual value
+ -- @param expected the expected value
+ assertIs: (actual, expected) =>
+ @assert actual == expected, @@msgs.assert.is, @format("type", expected),
+ @format "type", actual
+
+ --- Fails the assertion if the actual value is *identical* to the expected value.
+ -- Inverse of @{UnitTest:assertIs}
+ -- @param actual the actual value
+ -- @param expected the expected value
+ assertIsNot: (actual, expected) =>
+ @assert actual != expected, @@msgs.assert.isNot, @format "type", actual
+
+
+ -- table asserts
+
+ --- Fails the assertion if the items of one table aren't *equal* to the items of another.
+ -- Unlike @{UnitTest:assertEquals} this ignores table keys, so e.g. two numerically-keyed tables
+ -- with equal items in a different order would still be considered equal.
+ -- By default this assertion only compares values at numerical indexes (see @{UnitTest:itemsEqual} for details).
+ -- @tparam table actual the first table
+ -- @tparam table expected the second table
+ -- @tparam[opt=true] boolean onlyNumericalKeys Disable this option to also compare items with non-numerical keys at the expense of a performance hit.
+ assertItemsEqual: (actual, expected, onlyNumKeys = true) =>
+ @checkArgTypes { actual: {actual, "table"}, expected: {expected, "table"},
+ onlyNumKeys: {onlyNumKeys, "boolean"}
+ }
+
+ @assert self.itemsEqual(actual, expected, onlyNumKeys),
+ @@msgs.assert.itemsEqual, "equal",
+ @@msgs.assert[onlyNumKeys and "itemsEqualNumericKeys" or "itemsEqualAllKeys"],
+ @logger\dumpToString(actual), @logger\dumpToString expected
+
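+ -- A short sketch contrasting equality, identity and item comparison (tables below are illustrative):
+ --
+ --   tables: (test) ->
+ --     a, b = {1, {2}}, {1, {2}}
+ --     test\assertEquals a, b                     -- equal by value, compared recursively
+ --     test\assertIsNot a, b                      -- but not identical (different references)
+ --     test\assertIs a, a
+ --     test\assertItemsEqual {1, 2, 3}, {3, 1, 2} -- order and keys are ignored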
+
+ --- Fails the assertion if the items of one table aren't *identical* to the items of another.
+ -- Like @{UnitTest:assertItemsEqual} this ignores table keys, however it compares table items by reference.
+ -- By default this assertion only compares values at numerical indexes (see @{UnitTest:itemsEqual} for details).
+ -- @tparam table actual the first table
+ -- @tparam table expected the second table
+ -- @tparam[opt=true] boolean onlyNumericalKeys Disable this option to also compare items with non-numerical keys
+ assertItemsAre: (actual, expected, onlyNumKeys = true) =>
+ @checkArgTypes { actual: {actual, "table"}, expected: {expected, "table"},
+ onlyNumKeys: {onlyNumKeys, "boolean"}
+ }
+
+ @assert self.itemsEqual(actual, expected, onlyNumKeys, nil, true),
+ @@msgs.assert.itemsEqual, "identical",
+ @@msgs.assert[onlyNumKeys and "itemsEqualNumericKeys" or "itemsEqualAllKeys"],
+ @logger\dumpToString(actual), @logger\dumpToString expected
+
+ --- Fails the assertion if the numerically-keyed items of a table aren't continuous.
+ -- The rationale for this is that when iterating a table with ipairs or retrieving its length
+ -- with the # operator, Lua may stop processing the table once the item at index n is nil,
+ -- effectively hiding any subsequent values.
+ -- @tparam table tbl the table to be checked
+ assertContinuous: (tbl) =>
+ @checkArgTypes { tbl: {tbl, "table"} }
+
+ realCnt, contCnt = 0, #tbl
+ for k in pairs tbl
+ if type(k) == "number" and math.floor(k) == k
+ realCnt += 1
+
+ @assert realCnt == contCnt, @@msgs.assert.continuous, contCnt+1, realCnt
+
+ -- string asserts
+
+ --- Fails the assertion if a string doesn't match the specified pattern.
+ -- Supports both Lua and Regex patterns.
+ -- @tparam string str the input string
+ -- @tparam string pattern the pattern to be matched against
+ -- @tparam[opt=false] boolean useRegex Enable this option to use Regex instead of Lua patterns
+ -- @tparam[optchain] re.Flags flags Any amount of regex flags as defined by the Aegisub re module
+ -- (see here for details: http://docs.aegisub.org/latest/Automation/Lua/Modules/re/#flags)
+ assertMatches: (str, pattern, useRegex = false, ...) =>
+ @checkArgTypes { str: {str, "string"}, pattern: {pattern, "string"},
+ useRegex: {useRegex, "boolean"}
+ }
+
+ match = useRegex and re.match(str, pattern, ...) or str\match pattern, ...
+ @assert match, @@msgs.assert.matches, str, useRegex and "regex" or "Lua", pattern
+
+ --- Fails the assertion if a string doesn't contain a specified substring.
+ -- Search is case-sensitive by default.
+ -- @tparam string str the input string
+ -- @tparam string needle the substring to be found
+ -- @tparam[opt=true] boolean caseSensitive Disable this option to use locale-dependent case-insensitive comparison.
+ -- @tparam[opt=1] number init the first byte to start the search at
+ assertContains: (str, needle, caseSensitive = true, init = 1) =>
+ @checkArgTypes { str: {str, "string"}, needle: {needle, "string"},
+ caseSensitive: {caseSensitive, "boolean"}, init: {init, "number"}
+ }
+
+ _str, _needle = if caseSensitive
+ str, needle
+ else str\lower!, needle\lower!
+ @assert _str\find(_needle, init, true), @@msgs.assert.contains, str, needle,
+ caseSensitive and "sensitive" or "insensitive"
+
+ -- function asserts
+
+
+ --- Fails the assertion if calling a function with the specified arguments doesn't cause it to throw an error.
+ -- @tparam function func the function to be called
+ -- @param[opt] args any number of arguments to be passed into the function
+ assertError: (func, ...) =>
+ @checkArgTypes { func: {func, "function"} }
+
+ res = table.pack pcall func, ...
+ retCnt, success = res.n - 1, table.remove res, 1
+ res.n = nil
+ @assert success == false, @@msgs.assert.error, retCnt, @logger\dumpToString res
+ return res[1]
+
+ --- Fails the assertion if a function call doesn't cause an error message that matches the specified pattern.
+ -- Supports both Lua and Regex patterns.
+ -- @tparam function func the function to be called
+ -- @tparam[opt={}] table args a table of any number of arguments to be passed into the function
+ -- @tparam string pattern the pattern to be matched against
+ -- @tparam[opt=false] boolean useRegex Enable this option to use Regex instead of Lua patterns
+ -- @tparam[optchain] re.Flags flags Any amount of regex flags as defined by the Aegisub re module
+ -- (see here for details: http://docs.aegisub.org/latest/Automation/Lua/Modules/re/#flags)
+ assertErrorMsgMatches: (func, params = {}, pattern, useRegex = false, ...) =>
+ @checkArgTypes { func: {func, "function"}, params: {params, "table"},
+ pattern: {pattern, "string"}, useRegex: {useRegex, "boolean"}
+ }
+ msg = @assertError func, unpack params
+
+ match = useRegex and re.match(msg, pattern, ...) or msg\match pattern, ...
+ @assert match, @@msgs.assert.errorMsgMatches, msg, useRegex and "regex" or "Lua", pattern
+
+--- A special case of the UnitTest class for a setup routine
+-- @classmod UnitTestSetup
+class UnitTestSetup extends UnitTest
+ --- Runs the setup routine.
+ -- Only the @{UnitTestSetup} object is passed into the function.
+ -- Values returned by the setup routine are stored to be passed into the test functions later.
+ -- @treturn[1] boolean true (test succeeded)
+ -- @treturn[1] table retVals all values returned by the function packed into a table
+ -- @treturn[2] boolean false (test failed)
+ -- @treturn[2] string the error message describing how the test failed
+ run: =>
+ @logger\logEx nil, @@msgs.run.setup, false
+
+ res = table.pack pcall @f, @
+ @success = table.remove res, 1
+ @logResult res[1]
+
+ if @success
+ @retVals = res
+ return true, @retVals
+
+ return false, @errMsg
+
+--- A special case of the UnitTest class for a teardown routine
+-- @classmod UnitTestTeardown
+class UnitTestTeardown extends UnitTest
+ --- Formats and writes a "performing teardown" message to the log.
+ -- @local
+ logStart: =>
+ @logger\logEx nil, @@msgs.run.teardown, false
+
+
+--- Holds a unit test class, i.e. a group of unit tests with common setup and teardown routines
+-- @classmod UnitTestClass
+class UnitTestClass
+ msgs = {
+ run: {
+ runningTests: "Running test class '%s' (%d tests)..."
+ setupFailed: "Setup for test class '%s' FAILED, skipping tests."
+ abort: "Test class '%s' FAILED after %d tests, aborting."
+ testsFailed: "Done testing class '%s'. FAILED %d of %d tests."
+ success: "Test class '%s' completed successfully."
+ testNotFound: "Couldn't find requested test '%s'."
+ }
+ }
+
+ --- Creates a new unit test class complete with a number of unit tests as well as optional setup and teardown routines.
+ -- Instead of calling this constructor directly, it is recommended to call @{UnitTestSuite:new} instead,
+ -- which takes a table of test functions and creates test classes automatically.
+ -- @tparam string name a descriptive name for the test class
+ -- @tparam[opt={}] {[string] = function|table, ...} args a table of test functions by name;
+ -- indexes starting with "_" have special meaning and are not added as regular tests:
+ -- * _setup: a @{UnitTestSetup} routine
+ -- * _teardown: a @{UnitTestTeardown} routine
+ -- * _order: alternative syntax to the order parameter (see below)
+ -- @tparam[opt=nil (unordered)] {string, ...} order a list of test names in the desired execution order.
+ -- Only tests mentioned in this table will be performed when running the whole test class.
+ -- If unspecified, all tests will be run in random order.
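+
+-- A sketch of how a test file might put the pieces together. The module name, class and
+-- test names below are hypothetical; the file is expected to return the suite object so
+-- DependencyControl can recognize it as a test suite (see @{UnitTestSuite}):
+--
+--   UnitTestSuite = require "l0.DependencyControl.UnitTestSuite"
+--   tests = UnitTestSuite "l0.MyModule", {
+--     StringHelpers: {
+--       _setup: (test) -> require "l0.MyModule"        -- return values are passed to every test
+--       trimsWhitespace: (test, MyModule) ->
+--         test\assertEquals MyModule.trim("  a  "), "a"
+--       rejectsNumbers: (test, MyModule) ->
+--         test\assertError MyModule.trim, 42
+--     }
+--   }
+--   return tests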
+ new: (@name, args = {}, @order, @testSuite) => + @logger = @testSuite.logger + @setup = UnitTestSetup "setup", args._setup, @ + @teardown = UnitTestTeardown "teardown", args._teardown, @ + @description = args._description + @order or= args._order + @tests = [UnitTest(name, f, @) for name, f in pairs args when "_" != name\sub 1,1] + + --- Runs all tests in the unit test class in the specified order. + -- @param[opt=false] abortOnFail stops testing once a test fails + -- @param[opt=(default)] overrides the default test order + -- @treturn[1] boolean true (test class succeeded) + -- @treturn[2] boolean false (test class failed) + -- @treturn[2] {@{UnitTest}, ...} a list of unit test that failed + run: (abortOnFail, order = @order) => + tests, failed = @tests, {} + if order + tests, mappings = {}, {test.name, test for test in *@tests} + for i, name in ipairs order + @logger\assert mappings[name], msgs.run.testNotFound, name + tests[i] = mappings[name] + testCnt, failedCnt = #tests, 0 + + @logger\log msgs.run.runningTests, @name, testCnt + @logger.indent += 1 + + success, res = @setup\run! + -- failing the setup always aborts + unless success + @logger.indent -= 1 + @logger\warn msgs.run.setupFailed, @name + return false, -1 + + for i, test in pairs tests + unless test\run unpack res + failedCnt += 1 + failed[#failed+1] = test + if abortOnFail + @logger.indent -= 1 + @logger\warn msgs.run.abort, @name, i + return false, failed + + @logger.indent -= 1 + @success = failedCnt == 0 + + if @success + @logger\log msgs.run.success, @name + return true + + @logger\log msgs.run.testsFailed, @name, failedCnt, testCnt + return false, failed + + +--- A DependencyControl unit test suite. +-- Your test file/module must reteturn a UnitTestSuite object in order to be recognized as a test suite. +class UnitTestSuite + msgs = { + run: { + running: "Running %d test classes for %s... " + aborted: "Aborting after %d test classes... " + classesFailed: "FAILED %d of %d test classes." + success: "All tests completed successfully." + classNotFound: "Couldn't find requested test class '%s'." + } + registerMacros: { + allDesc: "Runs the whole test suite." + } + new: { + badClassesType: "Test classes must be passed in either as a table or an import function, got a %s" + } + import: { + noTableReturned: "The test import function must return a table of test classes, got a %s." + } + } + + @UnitTest = UnitTest + @UnitTestClass = UnitTestClass + @testDir = {macros: aegisub.decode_path("?user/automation/tests/DepUnit/macros"), + modules: aegisub.decode_path("?user/automation/tests/DepUnit/modules")} + + --- Creates a complete unit test suite for a module or automation script. + -- Using this constructor will create all test classes and tests automatically. + -- @tparam string namespace the namespace of the module or automation script to test. + -- @tparam {[string] = table, ...}|function(self, dependencies, args...) args To create a UnitTest suite, + -- you must supply a hashtable of @{UnitTestClass} constructor tables by name. You can either do so directly, + -- or wrap it in a function that takes a number of arguments depending on how the tests are registered: + -- * self: the module being testsed (skipped for automation scripts) + -- * dependencies: a numerically keyed table of all the modules required by the tested script/module (in order) + -- * args: any additional arguments passed into the @{DependencyControl\registerTests} function. 
+ -- Doing so is required to test automation scripts as well as module functions not exposed by its API. + -- indexes starting with "_" have special meaning and are not added as regular tests: + -- * _order: alternative syntax to the order parameter (see below) + -- @tparam [opt=nil (unordered)] {string, ...} An list of test class names in the desired execution order. + -- Only test classes mentioned in this table will be performed when running the whole test suite. + -- If unspecified, all test classes will be run in random order. + new: (@namespace, classes, @order) => + @logger = Logger defaultLevel: 3, fileBaseName: @namespace, fileSubName: "UnitTests", toFile: true + @classes = {} + switch type classes + when "table" then @addClasses classes + when "function" then @importFunc = classes + else @logger\error msgs.new.badClassesType, type classes + + --- Constructs test classes and adds them to the suite. + -- Use this if you need to add additional test classes to an existing @{UnitTestSuite} object. + -- @tparam {[string] = table, ...} args a hashtable of @{UnitTestClass} constructor tables by name. + addClasses: (classes) => + @classes[#@classes+1] = UnitTestClass(name, args, args._order, @) for name, args in pairs classes when "_" != name\sub 1,1 + if classes._order + @order or= {} + @order[#@order+1] = clsName for clsName in *classes._order + + --- Imports test classes from a function (passing in the specified arguments) and adds them to the suite. + -- Use this if you need to add additional test classes to an existing @{UnitTestSuite} object. + -- @tparam [opt] args a hashtable of @{UnitTestClass} constructor tables by name. + import: (...) => + return false unless @importFunc + classes = self.importFunc ... + @logger\assert type(classes) == "table", msgs.import.noTableReturned, type classes + @addClasses classes + @importFunc = nil + + --- Registers macros for running all or specific test classes of this suite. + -- If the test script is placed in the appropriate directory (according to module/automation script namespace), + -- this is automatically handled by DependencyControl. + registerMacros: => + menuItem = {"DependencyControl", "Run Tests", @name or @namespace, "[All]"} + aegisub.register_macro table.concat(menuItem, "/"), msgs.registerMacros.allDesc, -> @run! + for cls in *@classes + menuItem[4] = cls.name + aegisub.register_macro table.concat(menuItem, "/"), cls.description, -> cls\run! + + --- Runs all test classes of this suite in the specified order. 
+ -- @param[opt=false] abortOnFail stops testing once a test fails
+ -- @param[opt] order overrides the default test order
+ -- @treturn[1] boolean true (test suite succeeded)
+ -- @treturn[2] boolean false (test suite failed)
+ -- @treturn[2] {@{UnitTest}, ...} a list of unit tests that failed
+ run: (abortOnFail, order = @order) =>
+ classes, allFailed = @classes, {}
+ if order
+ classes, mappings = {}, {cls.name, cls for cls in *@classes}
+ for i, name in ipairs order
+ @logger\assert mappings[name], msgs.run.classNotFound, name
+ classes[i] = mappings[name]
+
+ classCnt, failedCnt = #classes, 0
+ @logger\log msgs.run.running, classCnt, @namespace
+ @logger.indent += 1
+
+ for i, cls in pairs classes
+ success, failed = cls\run abortOnFail
+ unless success
+ failedCnt += 1
+ allFailed[#allFailed+1] = test for test in *failed
+ if abortOnFail
+ @logger.indent -= 1
+ @logger\warn msgs.run.aborted, i
+ return false, allFailed
+
+ @logger.indent -= 1
+ @success = failedCnt == 0
+ if @success
+ @logger\log msgs.run.success
+ else @logger\log msgs.run.classesFailed, failedCnt, classCnt
+ return @success, failedCnt > 0 and allFailed or nil
\ No newline at end of file
diff --git a/modules/DependencyControl/UpdateFeed.moon b/modules/DependencyControl/UpdateFeed.moon
index 4fce367..9bc29a8 100644
--- a/modules/DependencyControl/UpdateFeed.moon
+++ b/modules/DependencyControl/UpdateFeed.moon
@@ -1,238 +1,238 @@
-Logger = require "l0.DependencyControl.Logger"
-ffi = require "ffi"
-DownloadManager = require "DM.DownloadManager"
-
-class ScriptUpdateRecord
- platform = "#{ffi.os}-#{ffi.arch}"
- msgs = {
- errors: {
- noActiveChannel: "No active channel."
- }
- changelog: {
- header: "Changelog for %s v%s (released %s):"
- verTemplate: "v %s:"
- msgTemplate: " • %s"
- }
- }
- @logger = Logger fileBaseName: @@__name
-
- new: (@namespace, @data, @config = {c:{}}, isModule, autoChannel = true, @@logger = @@logger) =>
- @moduleName = isModule and @namespace
- @[k] = v for k, v in pairs data
- @setChannel! if autoChannel
-
-
- getChannels: =>
- channels, default = {}
- for name, channel in pairs @data.channels
- channels[#channels+1] = name
- if channel.default and not default
- default = name
-
- return channels, default
-
- setChannel: (channelName = @config.c.activeChannel) =>
- with @config.c
- .channels, default = @getChannels!
- .lastChannel or= channelName or default - channelData = @data.channels[.lastChannel] - @activeChannel = .lastChannel - return false, @activeChannel unless channelData - @[k] = v for k, v in pairs channelData - - @files = @files and [file for file in *@files when not file.platform or file.platform == platform] or {} - return true, @activeChannel - - checkPlatform: => - @@logger\assert @activeChannel, msgs.errors.noActiveChannel - return not @platforms or ({p,true for p in *@platforms})[platform], platform - - getChangelog: (versionRecord, minVer = 0) => - return "" unless "table" == type @changelog - maxVer = versionRecord\getVersionNumber @version - minVer = versionRecord\getVersionNumber minVer - - changelog = {} - for ver, entry in pairs @changelog - ver = versionRecord\getVersionNumber ver - verStr = versionRecord\getVersionString ver - if ver >= minVer and ver <= maxVer - changelog[#changelog+1] = {ver, verStr, entry} - - return "" if #changelog == 0 - table.sort changelog, (a,b) -> a[1]>b[1] - - msg = {msgs.changelog.header\format @name, versionRecord\getVersionString(@version), @released or ""} - for chg in *changelog - chg[3] = {chg[3]} if type(chg[3]) ~= "table" - if #chg[3] > 0 - msg[#msg+1] = @@logger\format msgs.changelog.verTemplate, 1, chg[2] - msg[#msg+1] = @@logger\format(msgs.changelog.msgTemplate, 1, entry) for entry in *chg[3] - - return table.concat msg, "\n" - -class UpdateFeed - templateData = { - maxDepth: 7, - templates: { - feedName: {depth: 1, order: 1, key: "name" } - baseUrl: {depth: 1, order: 2, key: "baseUrl" } - feed: {depth: 1, order: 3, key: "knownFeeds", isHashTable: true } - namespace: {depth: 3, order: 1, parentKeys: {macros:true, modules:true} } - namespacePath: {depth: 3, order: 2, parentKeys: {macros:true, modules:true}, repl:"%.", to: "/" } - scriptName: {depth: 3, order: 3, key: "name" } - channel: {depth: 5, order: 1, parentKeys: {channels:true} } - version: {depth: 5, order: 2, key: "version" } - platform: {depth: 7, order: 1, key: "platform" } - fileName: {depth: 7, order: 2, key: "name" } - -- rolling templates - fileBaseUrl: {key: "fileBaseUrl", rolling: true } - } - sourceAt: {} - } - - msgs = { - trace: { - usingCached: "Using cached feed." - downloaded: "Downloaded feed to %s." - } - errors: { - downloadAdd: "Couldn't initiate download of %s to %s (%s)." - downloadFailed: "Download of feed %s to %s failed (%s)." - cantOpen: "Can't open downloaded feed for reading (%s)." - parse: "Error parsing feed." - } - } - - -- default settings - @logger = Logger fileBaseName: @@__name - @downloadPath = aegisub.decode_path "?temp/l0.#{@@__name}_feedCache" - @fileBaseName = "l0.#{@@__name}_" - @fileMatchTemplate = "l0.#{@@__name}_%x%x%x%x.*%.json" - @dumpExpanded = false - - @cache = {} - dlm = DownloadManager aegisub.decode_path @downloadPath - feedsHaveBeenTrimmed = false - - -- precalculate some tables for the templater - templateData.rolling = {n, true for n,t in pairs templateData.templates when t.rolling} - templateData.sourceKeys = {t.key, t.depth for n,t in pairs templateData.templates when t.key} - with templateData - for i=1,.maxDepth - .sourceAt[i], j = {}, 1 - for name, tmpl in pairs .templates - if tmpl.depth==i and not tmpl.rolling - .sourceAt[i][j] = name - j += 1 - table.sort .sourceAt[i], (a,b) -> return .templates[a].order < .templates[b].order - - new: (@url, autoFetch = true, fileName) => - -- delete old feeds - feedsHaveBeenTrimmed or= Logger(fileMatchTemplate: @@fileMatchTemplate, logDir: @@downloadPath, maxFiles: 20)\trimFiles! 
- - @fileName = fileName or table.concat {@@downloadPath, @@fileBaseName, "%04X"\format(math.random 0, 16^4-1), ".json"} - if @@cache[@url] - @@logger\trace msgs.trace.usingCached - @data = @@cache[@url] - elseif autoFetch - @fetch! - - getKnownFeeds: => - return {} unless @data - return [url for _, url in pairs @data.knownFeeds] - -- TODO: maybe also search all requirements for feed URLs - - fetch: (fileName) => - @fileName = fileName if fileName - - dl, err = dlm\addDownload @url, @fileName - unless dl - return false, msgs.errors.downloadAdd\format @url, @fileName, err - - dlm\waitForFinish -> true - if dl.error - return false, msgs.errors.downloadFailed\format @url, @fileName, dl.error - - @@logger\trace msgs.trace.downloaded, @fileName - - handle, err = io.open @fileName - unless handle - return false, msgs.errors.cantOpen\format err - - decoded, data = pcall json.decode, handle\read "*a" - unless decoded and data - -- luajson errors are useless dumps of whatever, no use to pass them on to the user - return false, msgs.errors.parse - - data[key] = {} for key in *{"macros", "modules", "knownFeeds"} when not data[key] - @data, @@cache[@url] = data, data - @expand! - return @data - - expand: => - {:templates, :maxDepth, :sourceAt, :rolling, :sourceKeys} = templateData - vars, rvars = {}, {i, {} for i=0, maxDepth} - - expandTemplates = (val, depth, rOff=0) -> - return switch type val - when "string" - val = val\gsub "@{(.-):(.-)}", (name, key) -> - if type(vars[name]) == "table" or type(rvars[depth+rOff]) == "table" - vars[name][key] or rvars[depth+rOff][name][key] - val\gsub "@{(.-)}", (name) -> vars[name] or rvars[depth+rOff][name] - when "table" - {k, expandTemplates v, depth, rOff for k, v in pairs val} - else val - - - recurse = (obj, depth = 1, parentKey = "", upKey = "") -> - -- collect regular template variables first - for name in *sourceAt[depth] - with templates[name] - if not .key - -- template variables are not expanded if they are keys - vars[name] = parentKey if .parentKeys[upKey] - elseif .key and obj[.key] - -- expand other templates used in template variable - obj[.key] = expandTemplates obj[.key], depth - vars[name] = obj[.key] - vars[name] = vars[name]\gsub(.repl, .to) if .repl - - -- update rolling template variables last - for name,_ in pairs rolling - rvars[depth][name] = obj[templates[name].key] or rvars[depth-1][name] or "" - rvars[depth][name] = expandTemplates rvars[depth][name], depth, -1 - obj[templates[name].key] and= rvars[depth][name] - - -- expand variables in non-template strings and recurse tables - for k,v in pairs obj - if sourceKeys[k] ~= depth and not rolling[k] - switch type v - when "string" - obj[k] = expandTemplates obj[k], depth - when "table" - recurse v, depth+1, k, parentKey - -- invalidate template variables created at depth+1 - vars[name] = nil for name in *sourceAt[depth+1] - rvars[depth+1] = {} - - recurse @data - - if @@dumpExpanded - handle = io.open @fileName\gsub(".json$", ".exp.json"), "w" - handle\write(json.encode @data)\close! 
- - return @data - - getScript: (namespace, isModule, config, autoChannel) => - section = isModule and "modules" or "macros" - scriptData = @data[section][namespace] - return false unless scriptData - ScriptUpdateRecord namespace, scriptData, config, isModule, autoChannel - - getMacro: (namespace, config, autoChannel) => - @getScript namespace, false, config, autoChannel - - getModule: (namespace, config, autoChannel) => +Logger = require "l0.DependencyControl.Logger" +ffi = require "ffi" +DownloadManager = require "DM.DownloadManager" + +class ScriptUpdateRecord + platform = "#{ffi.os}-#{ffi.arch}" + msgs = { + errors: { + noActiveChannel: "No active channel." + } + changelog: { + header: "Changelog for %s v%s (released %s):" + verTemplate: "v %s:" + msgTemplate: " • %s" + } + } + @logger = Logger fileBaseName: @@__name + + new: (@namespace, @data, @config = {c:{}}, isModule, autoChannel = true, @@logger = @@logger) => + @moduleName = isModule and @namespace + @[k] = v for k, v in pairs data + @setChannel! if autoChannel + + + getChannels: => + channels, default = {} + for name, channel in pairs @data.channels + channels[#channels+1] = name + if channel.default and not default + default = name + + return channels, default + + setChannel: (channelName = @config.c.activeChannel) => + with @config.c + .channels, default = @getChannels! + .lastChannel or= channelName or default + channelData = @data.channels[.lastChannel] + @activeChannel = .lastChannel + return false, @activeChannel unless channelData + @[k] = v for k, v in pairs channelData + + @files = @files and [file for file in *@files when not file.platform or file.platform == platform] or {} + return true, @activeChannel + + checkPlatform: => + @@logger\assert @activeChannel, msgs.errors.noActiveChannel + return not @platforms or ({p,true for p in *@platforms})[platform], platform + + getChangelog: (versionRecord, minVer = 0) => + return "" unless "table" == type @changelog + maxVer = versionRecord\getVersionNumber @version + minVer = versionRecord\getVersionNumber minVer + + changelog = {} + for ver, entry in pairs @changelog + ver = versionRecord\getVersionNumber ver + verStr = versionRecord\getVersionString ver + if ver >= minVer and ver <= maxVer + changelog[#changelog+1] = {ver, verStr, entry} + + return "" if #changelog == 0 + table.sort changelog, (a,b) -> a[1]>b[1] + + msg = {msgs.changelog.header\format @name, versionRecord\getVersionString(@version), @released or ""} + for chg in *changelog + chg[3] = {chg[3]} if type(chg[3]) ~= "table" + if #chg[3] > 0 + msg[#msg+1] = @@logger\format msgs.changelog.verTemplate, 1, chg[2] + msg[#msg+1] = @@logger\format(msgs.changelog.msgTemplate, 1, entry) for entry in *chg[3] + + return table.concat msg, "\n" + +class UpdateFeed + templateData = { + maxDepth: 7, + templates: { + feedName: {depth: 1, order: 1, key: "name" } + baseUrl: {depth: 1, order: 2, key: "baseUrl" } + feed: {depth: 1, order: 3, key: "knownFeeds", isHashTable: true } + namespace: {depth: 3, order: 1, parentKeys: {macros:true, modules:true} } + namespacePath: {depth: 3, order: 2, parentKeys: {macros:true, modules:true}, repl:"%.", to: "/" } + scriptName: {depth: 3, order: 3, key: "name" } + channel: {depth: 5, order: 1, parentKeys: {channels:true} } + version: {depth: 5, order: 2, key: "version" } + platform: {depth: 7, order: 1, key: "platform" } + fileName: {depth: 7, order: 2, key: "name" } + -- rolling templates + fileBaseUrl: {key: "fileBaseUrl", rolling: true } + } + sourceAt: {} + } + + msgs = { + trace: { + 
usingCached: "Using cached feed." + downloaded: "Downloaded feed to %s." + } + errors: { + downloadAdd: "Couldn't initiate download of %s to %s (%s)." + downloadFailed: "Download of feed %s to %s failed (%s)." + cantOpen: "Can't open downloaded feed for reading (%s)." + parse: "Error parsing feed." + } + } + + -- default settings + @logger = Logger fileBaseName: @@__name + @downloadPath = aegisub.decode_path "?temp/l0.#{@@__name}_feedCache" + @fileBaseName = "l0.#{@@__name}_" + @fileMatchTemplate = "l0.#{@@__name}_%x%x%x%x.*%.json" + @dumpExpanded = false + + @cache = {} + dlm = DownloadManager aegisub.decode_path @downloadPath + feedsHaveBeenTrimmed = false + + -- precalculate some tables for the templater + templateData.rolling = {n, true for n,t in pairs templateData.templates when t.rolling} + templateData.sourceKeys = {t.key, t.depth for n,t in pairs templateData.templates when t.key} + with templateData + for i=1,.maxDepth + .sourceAt[i], j = {}, 1 + for name, tmpl in pairs .templates + if tmpl.depth==i and not tmpl.rolling + .sourceAt[i][j] = name + j += 1 + table.sort .sourceAt[i], (a,b) -> return .templates[a].order < .templates[b].order + + new: (@url, autoFetch = true, fileName) => + -- delete old feeds + feedsHaveBeenTrimmed or= Logger(fileMatchTemplate: @@fileMatchTemplate, logDir: @@downloadPath, maxFiles: 20)\trimFiles! + + @fileName = fileName or table.concat {@@downloadPath, @@fileBaseName, "%04X"\format(math.random 0, 16^4-1), ".json"} + if @@cache[@url] + @@logger\trace msgs.trace.usingCached + @data = @@cache[@url] + elseif autoFetch + @fetch! + + getKnownFeeds: => + return {} unless @data + return [url for _, url in pairs @data.knownFeeds] + -- TODO: maybe also search all requirements for feed URLs + + fetch: (fileName) => + @fileName = fileName if fileName + + dl, err = dlm\addDownload @url, @fileName + unless dl + return false, msgs.errors.downloadAdd\format @url, @fileName, err + + dlm\waitForFinish -> true + if dl.error + return false, msgs.errors.downloadFailed\format @url, @fileName, dl.error + + @@logger\trace msgs.trace.downloaded, @fileName + + handle, err = io.open @fileName + unless handle + return false, msgs.errors.cantOpen\format err + + decoded, data = pcall json.decode, handle\read "*a" + unless decoded and data + -- luajson errors are useless dumps of whatever, no use to pass them on to the user + return false, msgs.errors.parse + + data[key] = {} for key in *{"macros", "modules", "knownFeeds"} when not data[key] + @data, @@cache[@url] = data, data + @expand! 
+ return @data + + expand: => + {:templates, :maxDepth, :sourceAt, :rolling, :sourceKeys} = templateData + vars, rvars = {}, {i, {} for i=0, maxDepth} + + expandTemplates = (val, depth, rOff=0) -> + return switch type val + when "string" + val = val\gsub "@{(.-):(.-)}", (name, key) -> + if type(vars[name]) == "table" or type(rvars[depth+rOff]) == "table" + vars[name][key] or rvars[depth+rOff][name][key] + val\gsub "@{(.-)}", (name) -> vars[name] or rvars[depth+rOff][name] + when "table" + {k, expandTemplates v, depth, rOff for k, v in pairs val} + else val + + + recurse = (obj, depth = 1, parentKey = "", upKey = "") -> + -- collect regular template variables first + for name in *sourceAt[depth] + with templates[name] + if not .key + -- template variables are not expanded if they are keys + vars[name] = parentKey if .parentKeys[upKey] + elseif .key and obj[.key] + -- expand other templates used in template variable + obj[.key] = expandTemplates obj[.key], depth + vars[name] = obj[.key] + vars[name] = vars[name]\gsub(.repl, .to) if .repl + + -- update rolling template variables last + for name,_ in pairs rolling + rvars[depth][name] = obj[templates[name].key] or rvars[depth-1][name] or "" + rvars[depth][name] = expandTemplates rvars[depth][name], depth, -1 + obj[templates[name].key] and= rvars[depth][name] + + -- expand variables in non-template strings and recurse tables + for k,v in pairs obj + if sourceKeys[k] ~= depth and not rolling[k] + switch type v + when "string" + obj[k] = expandTemplates obj[k], depth + when "table" + recurse v, depth+1, k, parentKey + -- invalidate template variables created at depth+1 + vars[name] = nil for name in *sourceAt[depth+1] + rvars[depth+1] = {} + + recurse @data + + if @@dumpExpanded + handle = io.open @fileName\gsub(".json$", ".exp.json"), "w" + handle\write(json.encode @data)\close! + + return @data + + getScript: (namespace, isModule, config, autoChannel) => + section = isModule and "modules" or "macros" + scriptData = @data[section][namespace] + return false unless scriptData + ScriptUpdateRecord namespace, scriptData, config, isModule, autoChannel + + getMacro: (namespace, config, autoChannel) => + @getScript namespace, false, config, autoChannel + + getModule: (namespace, config, autoChannel) => @getScript namespace, true, config, autoChannel \ No newline at end of file diff --git a/modules/DependencyControl/Updater.moon b/modules/DependencyControl/Updater.moon index 294b357..c3c5806 100644 --- a/modules/DependencyControl/Updater.moon +++ b/modules/DependencyControl/Updater.moon @@ -1,514 +1,514 @@ -UpdateFeed = require "l0.DependencyControl.UpdateFeed" -fileOps = require "l0.DependencyControl.FileOps" -Logger = require "l0.DependencyControl.Logger" -DownloadManager = require "DM.DownloadManager" -PreciseTimer = require "PT.PreciseTimer" - -class UpdaterBase - @logger = Logger fileBaseName: @@__name - @config = nil -- set on creation of the Updater - msgs = { - updateError: { - [0]: "Couldn't %s %s '%s' because of a paradox: module not found but updater says up-to-date (%s)" - [1]: "Couldn't %s %s '%s' because the updater is disabled." - [2]: "Skipping %s of %s '%s': namespace '%s' doesn't conform to rules." - [3]: "Skipping %s of unmanaged %s '%s'." - [4]: "No remaining feed available to %s %s '%s' from." - [6]: "The %s of %s '%s' failed because no suitable package could be found %s." - [5]: "Skipped %s of %s '%s': Another update initiated by %s is already running." 
- [7]: "Skipped %s of %s '%s': An internet connection is currently not available." - [10]: "Skipped %s of %s '%s': the update task is already running." - [15]: "Couldn't %s %s '%s' because its requirements could not be satisfied:" - [30]: "Couldn't %s %s '%s': failed to create temporary download directory %s" - [35]: "Aborted %s of %s '%s' because the feed contained a missing or malformed SHA-1 hash for file %s." - [50]: "Couldn't finish %s of %s '%s' because some files couldn't be moved to their target location:\n" - [55]: "%s of %s '%s' succeeded, couldn't be located by the module loader." - [56]: "%s of %s '%s' succeeded, but an error occured while loading the module:\n%s" - [57]: "%s of %s '%s' succeeded, but it's missing a version record." - [58]: "%s of unmanaged %s '%s' succeeded, but an error occured while creating a DependencyControl record: %s" - [100]: "Error (%d) in component %s during %s of %s '%s':\n— %s" - } - updaterErrorComponent: {"DownloadManager (adding download)", "DownloadManager"} - } - - getUpdaterErrorMsg: (code, name, ...) => - args = {...} - if code <= -100 - -- Generic downstream error - -- VarArgs: 1: isModule, 2: isFetch, 3: error msg - return msgs.updateError[100]\format -code, msgs.updaterErrorComponent[math.floor(-code/100)], - args[2] and "fetch" or "update", args[1] and "module" or "macro", name, args[3] - else - -- Updater error: - -- VarArgs: 1: isModule, 2: isFetch, 3: additional information - return msgs.updateError[-code]\format args[2] and "fetch" or "update", - args[1] and "module" or "macro", - name, args[3] - -class UpdateTask extends UpdaterBase - DependencyControl = nil - dlm = DownloadManager! - msgs = { - checkFeed: { - downloadFailed: "Failed to download feed: %s" - noData: "The feed doesn't have any update information for %s '%s'." - badChannel: "The specified update channel '%s' wasn't present in the feed." - invalidVersion: "The feed contains an invalid version record for %s '%s' (channel: %s): %s." - unsupportedPlatform: "No download available for your platform '%s' (channel: %s)." - noFiles: "No files available to download for your platform '%s' (channel: %s)." - } - run: { - starting: "Starting %s of %s '%s'... " - fetching: "Trying to %sfetch missing %s '%s'..." - feedCandidates: "Trying %d candidate feeds (%s mode)..." - feedTrying: "Checking feed %d/%d (%s)..." - upToDate: "%s '%s' is up-to-date (v%s)." - alreadyUpdated: "%s v%s has already been installed." - noFeedAvailExt: "(required: %s; installed: %s; available: %s)" - noUpdate: "Feed has no new update." - skippedOptional: "Skipped %s of optional dependency '%s': %s" - optionalNoFeed: "No feed available to download module from." - optionalNoUpdate: "No suitable download could be found %s." - } - - performUpdate: { - updateReqs: "Checking requirements..." - updateReady: "Update ready. Using temporary directory '%s'." - fileUnchanged: "Skipped unchanged file '%s'." - fileAddDownload: "Added Download %s ==> '%s'." - filesDownloading: "Downloading %d files..." - movingFiles: "Downloads complete. Now moving files to Aegisub automation directory '%s'..." - movedFile: "Moved '%s' ==> '%s'." - moveFileFailed: "Failed to move '%s' ==> '%s': %s" - updSuccess: "%s of %s '%s' (v%s) complete." - reloadNotice: "Please rescan your autoload directory for the changes to take effect." - unknownType: "Skipping file '%s': unknown type '%s'." - } - refreshRecord: { - unsetVirtual: "Update initated by another macro already fetched %s '%s', switching to update mode." 
- otherUpdate: "Update initated by another macro already updated %s '%s' to v%s." - } - } - - new: (@record, targetVersion = 0, @addFeeds, @exhaustive, @channel, @optional, @updater) => - DependencyControl or= require "l0.DependencyControl" - assert @record.__class == DependencyControl, "First parameter must be a #{DependencyControl.__name} object." - - @triedFeeds = {} - @status = nil - @targetVersion = @record\getVersionNumber targetVersion - - return nil, -1 unless @@config.c.updaterEnabled - return nil, -2 unless @record\validateNamespace! - - set: (targetVersion, @addFeeds, @exhaustive, @channel, @optional) => - @targetVersion = @record\getVersionNumber targetVersion - return @ - - checkFeed: (feedUrl) => - -- get feed contents - feed = UpdateFeed feedUrl, false - unless feed.data -- no cached data available, perform download - success, err = feed\fetch! - unless success - return nil, msgs.checkFeed.downloadFailed\format err - - -- select our script and update channel - updateRecord = feed\getScript @record.namespace, @record.moduleName, @record.config, false - unless updateRecord - return nil, msgs.checkFeed.noData\format @record.moduleName and "module" or "macro", @record.name - - success, currentChannel = updateRecord\setChannel @channel - unless success - return nil, msgs.checkFeed.badChannel\format currentChannel - - -- check if an update is available and satisfies our requirements - res, version = @record\checkVersion updateRecord.version - if res == nil - return nil, msgs.checkFeed.invalidVersion\format @record.moduleName and "module" or "macro", - @record.name, currentChannel, tostring updateRecord.version - elseif res or @targetVersion > version - return false, nil, version - - -- check if our platform is supported/files are available to download - res, platform = updateRecord\checkPlatform! - unless res - return nil, msgs.checkFeed.unsupportedPlatform\format platform, currentChannel - if #updateRecord.files == 0 - return nil, msgs.checkFeed.noFiles\format platform, currentChannel - - return true, updateRecord, version - - - run: (waitLock, exhaustive = @@config.c.tryAllFeeds or @@exhaustive) => - logUpdateError = (code, extErr, virtual = @virtual) -> - if code < 0 - @@logger\log @getUpdaterErrorMsg code, @record.name, @record.moduleName, virtual, extErr - return code, extErr - - with @record do @@logger\log msgs.run.starting, .virtual and "download" or "update", - .moduleName and "module" or "macro", .name - - -- don't perform update of a script when another one is already running for the same script - return logUpdateError -10 if @running - - -- check if the script was already updated - if @updated and not exhaustive and @record\checkVersion @targetVersion - @@logger\log msgs.run.alreadyUpdated, @record.name, @record\getVersionString! 
- return 2 - - -- build feed list - userFeed, haveFeeds, feeds = @record.config.c.userFeed, {}, {} - if userFeed and not @triedFeeds[userFeed] - feeds[1] = userFeed - else - unless @triedFeeds[@record.feed] or haveFeeds[@record.feed] - feeds[1] = @record.feed - for feed in *@addFeeds - unless @triedFeeds[feed] or haveFeeds[feed] - feeds[#feeds+1] = feed - - for feed in *@@config.c.extraFeeds - unless @triedFeeds[feed] or haveFeeds[feed] - feeds[#feeds+1] = feed - - if #feeds == 0 - if @optional - @@logger\log msgs.run.skippedOptional, @record.name, - @record.virtual and "download" or "update", msgs.run.optionalNoFeed - return 3 - - return logUpdateError -4 - - -- check internet connection - return logUpdateError -7 unless dlm\isInternetConnected! - - -- get a lock on the updater - success, otherHost = @updater\getLock waitLock - return logUpdateError -5, otherHost unless success - - -- check feeds for update until we find and update or run out of feeds to check - -- normal mode: check feeds until an update matching the required version is found - -- exhaustive mode: check all feeds for updates and pick the highest version - - @@logger\log msgs.run.feedCandidates, #feeds, exhaustive and "exhaustive" or "normal" - @@logger.indent += 1 - - maxVer, updateRecord = 0 - for i, feed in ipairs feeds - @@logger\log msgs.run.feedTrying, i, #feeds, feed - - res, rec, version = @checkFeed feed - @triedFeeds[feed] = true - if res == nil - @@logger\log rec - elseif version > maxVer - maxVer = version - if res - updateRecord = rec - break unless exhaustive - else @@logger\trace msgs.run.noUpdate - else - @@logger\trace msgs.run.noUpdate - - @@logger.indent -= 1 - - local code, res - wasVirtual = @record.virtual - unless updateRecord - -- for a script to be marked up-to-date it has to installed on the user's system - -- and the version must at least be that returned by at least one feed - if maxVer>0 and not @record.virtual and @targetVersion <= @record.version - @@logger\log msgs.run.upToDate, @record.moduleName and "Module" or "Macro", - @record.name, @record\getVersionString! - return 0 - - res = msgs.run.noFeedAvailExt\format @targetVersion == 0 and "any" or @record\getVersionString(@targetVersion), - @record.virtual and "no" or @record\getVersionString!, - maxVer<1 and "none" or @record\getVersionString maxVer - - if @optional - @@logger\log msgs.run.skippedOptional, @record.name, @record.virtual and "download" or "update", - msgs.run.optionalNoUpdate\format res - return 3 - - return logUpdateError -6, res - - code, res = @performUpdate updateRecord - return logUpdateError code, res, wasVirtual - - performUpdate: (update) => - finish = (...) -> - @running = false - if @record.virtual or @record.unmanaged - @record\removeDummyRef! - return ... - - -- don't perform update of a script when another one is already running for the same script - return finish -10 if @running - @running = true - - -- set a dummy ref (which hasn't yet been set for virtual and unmanaged modules) - -- and record version to allow resolving circular dependencies - if @record.virtual or @record.unmanaged - @record\createDummyRef! 
- @record\setVersion update.version - - -- try to load required modules first to see if all dependencies are satisfied - -- this may trigger more updates - reqs = update.requiredModules - if reqs and #reqs > 0 - @@logger\log msgs.performUpdate.updateReqs - @@logger.indent += 1 - success, err = @record\loadModules reqs, {@record.feed} - @@logger.indent -= 1 - unless success - @@logger.indent += 1 - @@logger\log err - @@logger.indent -= 1 - return finish -15, err - - -- since circular dependencies are possible, our task may have completed in the meantime - -- so check again if we still need to update - return finish 2 if @updated and @record\checkVersion update.version - - - -- download updated scripts to temp directory - -- check hashes before download, only update changed files - - tmpDir = aegisub.decode_path "?temp/l0.#{DependencyControl.__name}_#{'%04X'\format math.random 0, 16^4-1}" - res, dir = fileOps.mkdir tmpDir - return finish -30, "#{tmpDir} (#{err})" if res == nil - - @@logger\log msgs.performUpdate.updateReady, tmpDir - - scriptSubDir = @record.moduleName and @record.moduleName\gsub("%.","/") or @record.namespace - - dlm\clear! - for file in *update.files - file.type or= "script" - - continue if file.delete - baseName = scriptSubDir .. file.name - tmpName, prettyName = "#{tmpDir}/#{file.type}/#{baseName}", baseName - switch file.type - when "script" - file.fullName = "#{@record.automationDir}/#{baseName}" - when "test" - file.fullName = "#{@record.testDir}/#{baseName}" - prettyName ..= " (Unit Test)" - else - file.unknown = true - @@logger\log msgs.performUpdate.unknownType, file.name, file.type - continue - - unless type(file.sha1)=="string" and #file.sha1 == 40 and tonumber(file.sha1, 16) - return finish -35, "#{prettyName} (#{tostring(file.sha1)\lower!})" - - if dlm\checkFileSHA1 file.fullName, file.sha1 - @@logger\trace msgs.performUpdate.fileUnchanged, prettyName - continue - - dl, err = dlm\addDownload file.url, tmpName, file.sha1 - return finish -140, err unless dl - dl.targetFile = file.fullName - @@logger\trace msgs.performUpdate.fileAddDownload, file.url, prettyName - - dlm\waitForFinish (progress) -> - @@logger\progress progress, msgs.performUpdate.filesDownloading, dlm.downloadCount - return true - @@logger\progress! 
- - if dlm.failedCount>0 - err = @@logger\format ["#{dl.url}: #{dl.error}" for dl in *dlm.failedDownloads], 1 - return finish -245, err - - - -- move files to their destination directory and clean up - - @@logger\log msgs.performUpdate.movingFiles, @record.automationDir - moveErrors = {} - @@logger.indent += 1 - for dl in *dlm.downloads - res, err = fileOps.move dl.outfile, dl.targetFile, true - -- don't immediately error out if moving of a single file failed - -- try to move as many files as possible and let the user handle the rest - if res - @@logger\trace msgs.performUpdate.movedFile, dl.outfile, dl.targetFile - else - @@logger\log msgs.performUpdate.moveFileFailed, dl.outfile, dl.targetFile, err - moveErrors[#moveErrors+1] = err - @@logger.indent -= 1 - - if #moveErrors>0 - return finish -50, @@logger\format moveErrors, 1 - else lfs.rmdir tmpDir - os.remove file.fullName for file in *update.files when file.delete and not file.unknown - - -- Nuke old module refs and reload - oldVer, wasVirtual = @record.version, @record.virtual - - -- Update complete, refresh module information/configuration - if @record.moduleName - ref = @record\loadModule @record, false, true - unless ref - if @record._error - return finish -56, @@logger\format @record._error, 1 - else return finish -55 - - -- get a fresh version record - if type(ref.version) == "table" and ref.version.__class.__name == DependencyControl.__name - @record = ref.version - else - return finish -57 unless ref.version - success, rec = pcall DependencyControl, {moduleName: @record.moduleName, version: ref.version, unmanaged: true, name: @record.name} - return finish -58, rec unless success - @record = rec - @ref = ref - - else with @record - .name, .version, .virtual, .unmanaged = @record.name, @record\getVersionNumber update.version - @record\writeConfig true, false - - @updated = true - @@logger\log msgs.performUpdate.updSuccess, wasVirtual and "Download" or "Update", - @record.moduleName and "module" or "macro", - @record.name, @record\getVersionString! - - -- Diplay changelog - @@logger\log update\getChangelog @record, (@record.getVersionNumber oldVer) + 1 - @@logger\log msgs.performUpdate.reloadNotice - - -- TODO: check handling of private module copies (need extra return value?) - return finish 1, @record\getVersionString! - - - refreshRecord: => - with @record - wasVirtual, oldVersion = .virtual, .version - \loadConfig true - if wasVirtual and not .virtual or .version > oldVersion - @updated = true - @ref = \loadModule @record, false, true if .moduleName - if wasVirtual - @@logger\log msgs.refreshRecord.unsetVirtual, .moduleName and "module" or "macro", .name - else - @@logger\log msgs.refreshRecord.otherUpdate, .moduleName and "module" or "macro", .name, \getVersionString! - -class Updater extends UpdaterBase - DependencyControl = nil - msgs = { - getLock: { - orphaned: "Ignoring orphaned in-progress update started by %s." - waitFinished: "Waited %d seconds." - abortWait: "Timeout reached after %d seconds." - waiting: "Waiting for update intiated by %s to finish..." - } - require: { - macroPassed: "%s is not a module." - upToDate: "Tried to require an update for up-to-date module '%s'." - } - scheduleUpdate: { - updaterDisabled: "Skipping update check for %s (Updater disabled)." - runningUpdate: "Running scheduled update for %s '%s'..." 
- } - } - new: (@host = script_namespace, globalConfig, logger) => - @tasks = macros: {}, modules: {} - super.config = globalConfig - super.logger = logger if logger - - addTask: (record, targetVersion, addFeeds = {}, exhaustive, channel, optional) => - DependencyControl or= require "l0.DependencyControl" - if record.__class != DependencyControl - depRec = {saveRecordToConfig: false, readGlobalScriptVars: false} - depRec[k] = v for k, v in pairs record - record = DependencyControl depRec - - task = @tasks[record.type][record.namespace] - if task - return task\set targetVersion, addFeeds, exhaustive, channel, optional - else - task = UpdateTask record, targetVersion, addFeeds, exhaustive, channel, optional, @ - @tasks[record.type][record.namespace] = task - return task, err - - require: (record, ...) => - @@logger\assert record.moduleName, msgs.require, record.name or record.namespace - @@logger\log "%s module '%s'...", record.virtual and "Fetching required" or "Updating outdated", record.name - task, code = @addTask record, ... - code, res = task\run true if task - - if code == 0 and not task.updated - -- usually we know in advance if a module is up to date so there's no reason to block other updaters - -- but we'll make sure to handle this case gracefully, anyway - @@logger\debug msgs.require.upToDate, task.record.name or task.record.moduleName - return task.record\loadModule task.record.moduleName - elseif code >= 0 - return task.ref - else -- pass on update errors - return nil, code, res - - scheduleUpdate: (record) => - unless @@config.c.updaterEnabled - @@logger\trace msgs.scheduleUpdate.updaterDisabled, record.name or record.namespace - return -1 - - -- no regular updates for non-existing or unmanaged modules - if record.virtual or record.unmanaged - return -3 - - -- the update interval has not yet been passed since the last update check - if record.config.c.lastUpdateCheck and (record.config.c.lastUpdateCheck + @@config.c.updateInterval > os.time!) - return false - - record.config.c.lastUpdateCheck = os.time! - record.config\write! - - task = @addTask record -- no need to check for errors, because we've already accounted for those case - @@logger\trace msgs.scheduleUpdate.runningUpdate, record.moduleName and "module" or "macro", record.name - return task\run! - - - getLock: (doWait, waitTimeout = @@config.c.updateWaitTimeout) => - return true if @hasLock - - @@config\load! - running, didWait = @@config.c.updaterRunning - if running and running.host != @host - otherHost = running.host - - if running.time + @@config.c.updateOrphanTimeout < os.time! - @@logger\log msgs.getLock.orphaned, running.host - elseif doWait - @@logger\log msgs.getLock.waiting, running.host - timeout, didWait = waitTimeout, true - while running and timeout > 0 - PreciseTimer.sleep 1000 - timeout -= 1 - @@config\load! - running = @@config.c.updaterRunning - @@logger\log timeout <= 0 and msgs.getLock.abortWait or msgs.getLock.waitFinished, - waitTimeout - timeout - - else return false, running.host - - -- register the running update in the config file to prevent collisions - -- with other scripts trying to update the same modules - - @@config.c.updaterRunning = host: @host, time: os.time! - @@config\write! - @hasLock = true - - -- reload important module version information from configuration - -- because another updater instance might have updated them in the meantime - if didWait - task\refreshRecord! 
for _,task in pairs @tasks.modules - - return true - - releaseLock: => - return false unless @hasLock - @hasLock = false - @@config.c.updaterRunning = false +UpdateFeed = require "l0.DependencyControl.UpdateFeed" +fileOps = require "l0.DependencyControl.FileOps" +Logger = require "l0.DependencyControl.Logger" +DownloadManager = require "DM.DownloadManager" +PreciseTimer = require "PT.PreciseTimer" + +class UpdaterBase + @logger = Logger fileBaseName: @@__name + @config = nil -- set on creation of the Updater + msgs = { + updateError: { + [0]: "Couldn't %s %s '%s' because of a paradox: module not found but updater says up-to-date (%s)" + [1]: "Couldn't %s %s '%s' because the updater is disabled." + [2]: "Skipping %s of %s '%s': namespace '%s' doesn't conform to rules." + [3]: "Skipping %s of unmanaged %s '%s'." + [4]: "No remaining feed available to %s %s '%s' from." + [6]: "The %s of %s '%s' failed because no suitable package could be found %s." + [5]: "Skipped %s of %s '%s': Another update initiated by %s is already running." + [7]: "Skipped %s of %s '%s': An internet connection is currently not available." + [10]: "Skipped %s of %s '%s': the update task is already running." + [15]: "Couldn't %s %s '%s' because its requirements could not be satisfied:" + [30]: "Couldn't %s %s '%s': failed to create temporary download directory %s" + [35]: "Aborted %s of %s '%s' because the feed contained a missing or malformed SHA-1 hash for file %s." + [50]: "Couldn't finish %s of %s '%s' because some files couldn't be moved to their target location:\n" + [55]: "%s of %s '%s' succeeded, couldn't be located by the module loader." + [56]: "%s of %s '%s' succeeded, but an error occured while loading the module:\n%s" + [57]: "%s of %s '%s' succeeded, but it's missing a version record." + [58]: "%s of unmanaged %s '%s' succeeded, but an error occured while creating a DependencyControl record: %s" + [100]: "Error (%d) in component %s during %s of %s '%s':\n— %s" + } + updaterErrorComponent: {"DownloadManager (adding download)", "DownloadManager"} + } + + getUpdaterErrorMsg: (code, name, ...) => + args = {...} + if code <= -100 + -- Generic downstream error + -- VarArgs: 1: isModule, 2: isFetch, 3: error msg + return msgs.updateError[100]\format -code, msgs.updaterErrorComponent[math.floor(-code/100)], + args[2] and "fetch" or "update", args[1] and "module" or "macro", name, args[3] + else + -- Updater error: + -- VarArgs: 1: isModule, 2: isFetch, 3: additional information + return msgs.updateError[-code]\format args[2] and "fetch" or "update", + args[1] and "module" or "macro", + name, args[3] + +class UpdateTask extends UpdaterBase + DependencyControl = nil + dlm = DownloadManager! + msgs = { + checkFeed: { + downloadFailed: "Failed to download feed: %s" + noData: "The feed doesn't have any update information for %s '%s'." + badChannel: "The specified update channel '%s' wasn't present in the feed." + invalidVersion: "The feed contains an invalid version record for %s '%s' (channel: %s): %s." + unsupportedPlatform: "No download available for your platform '%s' (channel: %s)." + noFiles: "No files available to download for your platform '%s' (channel: %s)." + } + run: { + starting: "Starting %s of %s '%s'... " + fetching: "Trying to %sfetch missing %s '%s'..." + feedCandidates: "Trying %d candidate feeds (%s mode)..." + feedTrying: "Checking feed %d/%d (%s)..." + upToDate: "%s '%s' is up-to-date (v%s)." + alreadyUpdated: "%s v%s has already been installed." 
+ noFeedAvailExt: "(required: %s; installed: %s; available: %s)" + noUpdate: "Feed has no new update." + skippedOptional: "Skipped %s of optional dependency '%s': %s" + optionalNoFeed: "No feed available to download module from." + optionalNoUpdate: "No suitable download could be found %s." + } + + performUpdate: { + updateReqs: "Checking requirements..." + updateReady: "Update ready. Using temporary directory '%s'." + fileUnchanged: "Skipped unchanged file '%s'." + fileAddDownload: "Added Download %s ==> '%s'." + filesDownloading: "Downloading %d files..." + movingFiles: "Downloads complete. Now moving files to Aegisub automation directory '%s'..." + movedFile: "Moved '%s' ==> '%s'." + moveFileFailed: "Failed to move '%s' ==> '%s': %s" + updSuccess: "%s of %s '%s' (v%s) complete." + reloadNotice: "Please rescan your autoload directory for the changes to take effect." + unknownType: "Skipping file '%s': unknown type '%s'." + } + refreshRecord: { + unsetVirtual: "Update initated by another macro already fetched %s '%s', switching to update mode." + otherUpdate: "Update initated by another macro already updated %s '%s' to v%s." + } + } + + new: (@record, targetVersion = 0, @addFeeds, @exhaustive, @channel, @optional, @updater) => + DependencyControl or= require "l0.DependencyControl" + assert @record.__class == DependencyControl, "First parameter must be a #{DependencyControl.__name} object." + + @triedFeeds = {} + @status = nil + @targetVersion = @record\getVersionNumber targetVersion + + return nil, -1 unless @@config.c.updaterEnabled + return nil, -2 unless @record\validateNamespace! + + set: (targetVersion, @addFeeds, @exhaustive, @channel, @optional) => + @targetVersion = @record\getVersionNumber targetVersion + return @ + + checkFeed: (feedUrl) => + -- get feed contents + feed = UpdateFeed feedUrl, false + unless feed.data -- no cached data available, perform download + success, err = feed\fetch! + unless success + return nil, msgs.checkFeed.downloadFailed\format err + + -- select our script and update channel + updateRecord = feed\getScript @record.namespace, @record.moduleName, @record.config, false + unless updateRecord + return nil, msgs.checkFeed.noData\format @record.moduleName and "module" or "macro", @record.name + + success, currentChannel = updateRecord\setChannel @channel + unless success + return nil, msgs.checkFeed.badChannel\format currentChannel + + -- check if an update is available and satisfies our requirements + res, version = @record\checkVersion updateRecord.version + if res == nil + return nil, msgs.checkFeed.invalidVersion\format @record.moduleName and "module" or "macro", + @record.name, currentChannel, tostring updateRecord.version + elseif res or @targetVersion > version + return false, nil, version + + -- check if our platform is supported/files are available to download + res, platform = updateRecord\checkPlatform! 
+ unless res
+ return nil, msgs.checkFeed.unsupportedPlatform\format platform, currentChannel
+ if #updateRecord.files == 0
+ return nil, msgs.checkFeed.noFiles\format platform, currentChannel
+
+ return true, updateRecord, version
+
+
+ run: (waitLock, exhaustive = @@config.c.tryAllFeeds or @@exhaustive) =>
+ logUpdateError = (code, extErr, virtual = @virtual) ->
+ if code < 0
+ @@logger\log @getUpdaterErrorMsg code, @record.name, @record.moduleName, virtual, extErr
+ return code, extErr
+
+ with @record do @@logger\log msgs.run.starting, .virtual and "download" or "update",
+ .moduleName and "module" or "macro", .name
+
+ -- don't perform update of a script when another one is already running for the same script
+ return logUpdateError -10 if @running
+
+ -- check if the script was already updated
+ if @updated and not exhaustive and @record\checkVersion @targetVersion
+ @@logger\log msgs.run.alreadyUpdated, @record.name, @record\getVersionString!
+ return 2
+
+ -- build feed list
+ userFeed, haveFeeds, feeds = @record.config.c.userFeed, {}, {}
+ if userFeed and not @triedFeeds[userFeed]
+ feeds[1] = userFeed
+ else
+ unless @triedFeeds[@record.feed] or haveFeeds[@record.feed]
+ feeds[1] = @record.feed
+ for feed in *@addFeeds
+ unless @triedFeeds[feed] or haveFeeds[feed]
+ feeds[#feeds+1] = feed
+
+ for feed in *@@config.c.extraFeeds
+ unless @triedFeeds[feed] or haveFeeds[feed]
+ feeds[#feeds+1] = feed
+
+ if #feeds == 0
+ if @optional
+ @@logger\log msgs.run.skippedOptional, @record.name,
+ @record.virtual and "download" or "update", msgs.run.optionalNoFeed
+ return 3
+
+ return logUpdateError -4
+
+ -- check internet connection
+ return logUpdateError -7 unless dlm\isInternetConnected!
+
+ -- get a lock on the updater
+ success, otherHost = @updater\getLock waitLock
+ return logUpdateError -5, otherHost unless success
+
+ -- check feeds for updates until we find an update or run out of feeds to check
+ -- normal mode: check feeds until an update matching the required version is found
+ -- exhaustive mode: check all feeds for updates and pick the highest version
+
+ @@logger\log msgs.run.feedCandidates, #feeds, exhaustive and "exhaustive" or "normal"
+ @@logger.indent += 1
+
+ maxVer, updateRecord = 0
+ for i, feed in ipairs feeds
+ @@logger\log msgs.run.feedTrying, i, #feeds, feed
+
+ res, rec, version = @checkFeed feed
+ @triedFeeds[feed] = true
+ if res == nil
+ @@logger\log rec
+ elseif version > maxVer
+ maxVer = version
+ if res
+ updateRecord = rec
+ break unless exhaustive
+ else @@logger\trace msgs.run.noUpdate
+ else
+ @@logger\trace msgs.run.noUpdate
+
+ @@logger.indent -= 1
+
+ local code, res
+ wasVirtual = @record.virtual
+ unless updateRecord
+ -- for a script to be marked up-to-date it has to be installed on the user's system
+ -- and the installed version must be at least the one returned by at least one feed
+ if maxVer>0 and not @record.virtual and @targetVersion <= @record.version
+ @@logger\log msgs.run.upToDate, @record.moduleName and "Module" or "Macro",
+ @record.name, @record\getVersionString!
+ return 0 + + res = msgs.run.noFeedAvailExt\format @targetVersion == 0 and "any" or @record\getVersionString(@targetVersion), + @record.virtual and "no" or @record\getVersionString!, + maxVer<1 and "none" or @record\getVersionString maxVer + + if @optional + @@logger\log msgs.run.skippedOptional, @record.name, @record.virtual and "download" or "update", + msgs.run.optionalNoUpdate\format res + return 3 + + return logUpdateError -6, res + + code, res = @performUpdate updateRecord + return logUpdateError code, res, wasVirtual + + performUpdate: (update) => + finish = (...) -> + @running = false + if @record.virtual or @record.unmanaged + @record\removeDummyRef! + return ... + + -- don't perform update of a script when another one is already running for the same script + return finish -10 if @running + @running = true + + -- set a dummy ref (which hasn't yet been set for virtual and unmanaged modules) + -- and record version to allow resolving circular dependencies + if @record.virtual or @record.unmanaged + @record\createDummyRef! + @record\setVersion update.version + + -- try to load required modules first to see if all dependencies are satisfied + -- this may trigger more updates + reqs = update.requiredModules + if reqs and #reqs > 0 + @@logger\log msgs.performUpdate.updateReqs + @@logger.indent += 1 + success, err = @record\loadModules reqs, {@record.feed} + @@logger.indent -= 1 + unless success + @@logger.indent += 1 + @@logger\log err + @@logger.indent -= 1 + return finish -15, err + + -- since circular dependencies are possible, our task may have completed in the meantime + -- so check again if we still need to update + return finish 2 if @updated and @record\checkVersion update.version + + + -- download updated scripts to temp directory + -- check hashes before download, only update changed files + + tmpDir = aegisub.decode_path "?temp/l0.#{DependencyControl.__name}_#{'%04X'\format math.random 0, 16^4-1}" + res, dir = fileOps.mkdir tmpDir + return finish -30, "#{tmpDir} (#{err})" if res == nil + + @@logger\log msgs.performUpdate.updateReady, tmpDir + + scriptSubDir = @record.moduleName and @record.moduleName\gsub("%.","/") or @record.namespace + + dlm\clear! + for file in *update.files + file.type or= "script" + + continue if file.delete + baseName = scriptSubDir .. file.name + tmpName, prettyName = "#{tmpDir}/#{file.type}/#{baseName}", baseName + switch file.type + when "script" + file.fullName = "#{@record.automationDir}/#{baseName}" + when "test" + file.fullName = "#{@record.testDir}/#{baseName}" + prettyName ..= " (Unit Test)" + else + file.unknown = true + @@logger\log msgs.performUpdate.unknownType, file.name, file.type + continue + + unless type(file.sha1)=="string" and #file.sha1 == 40 and tonumber(file.sha1, 16) + return finish -35, "#{prettyName} (#{tostring(file.sha1)\lower!})" + + if dlm\checkFileSHA1 file.fullName, file.sha1 + @@logger\trace msgs.performUpdate.fileUnchanged, prettyName + continue + + dl, err = dlm\addDownload file.url, tmpName, file.sha1 + return finish -140, err unless dl + dl.targetFile = file.fullName + @@logger\trace msgs.performUpdate.fileAddDownload, file.url, prettyName + + dlm\waitForFinish (progress) -> + @@logger\progress progress, msgs.performUpdate.filesDownloading, dlm.downloadCount + return true + @@logger\progress! 
+
+ if dlm.failedCount>0
+ err = @@logger\format ["#{dl.url}: #{dl.error}" for dl in *dlm.failedDownloads], 1
+ return finish -245, err
+
+
+ -- move files to their destination directory and clean up
+
+ @@logger\log msgs.performUpdate.movingFiles, @record.automationDir
+ moveErrors = {}
+ @@logger.indent += 1
+ for dl in *dlm.downloads
+ res, err = fileOps.move dl.outfile, dl.targetFile, true
+ -- don't immediately error out if moving of a single file failed
+ -- try to move as many files as possible and let the user handle the rest
+ if res
+ @@logger\trace msgs.performUpdate.movedFile, dl.outfile, dl.targetFile
+ else
+ @@logger\log msgs.performUpdate.moveFileFailed, dl.outfile, dl.targetFile, err
+ moveErrors[#moveErrors+1] = err
+ @@logger.indent -= 1
+
+ if #moveErrors>0
+ return finish -50, @@logger\format moveErrors, 1
+ else lfs.rmdir tmpDir
+ os.remove file.fullName for file in *update.files when file.delete and not file.unknown
+
+ -- Nuke old module refs and reload
+ oldVer, wasVirtual = @record.version, @record.virtual
+
+ -- Update complete, refresh module information/configuration
+ if @record.moduleName
+ ref = @record\loadModule @record, false, true
+ unless ref
+ if @record._error
+ return finish -56, @@logger\format @record._error, 1
+ else return finish -55
+
+ -- get a fresh version record
+ if type(ref.version) == "table" and ref.version.__class.__name == DependencyControl.__name
+ @record = ref.version
+ else
+ return finish -57 unless ref.version
+ success, rec = pcall DependencyControl, {moduleName: @record.moduleName, version: ref.version, unmanaged: true, name: @record.name}
+ return finish -58, rec unless success
+ @record = rec
+ @ref = ref
+
+ else with @record
+ .name, .version, .virtual, .unmanaged = @record.name, @record\getVersionNumber update.version
+ @record\writeConfig true, false
+
+ @updated = true
+ @@logger\log msgs.performUpdate.updSuccess, wasVirtual and "Download" or "Update",
+ @record.moduleName and "module" or "macro",
+ @record.name, @record\getVersionString!
+
+ -- Display changelog
+ @@logger\log update\getChangelog @record, (@record.getVersionNumber oldVer) + 1
+ @@logger\log msgs.performUpdate.reloadNotice
+
+ -- TODO: check handling of private module copies (need extra return value?)
+ return finish 1, @record\getVersionString!
+
+
+ refreshRecord: =>
+ with @record
+ wasVirtual, oldVersion = .virtual, .version
+ \loadConfig true
+ if wasVirtual and not .virtual or .version > oldVersion
+ @updated = true
+ @ref = \loadModule @record, false, true if .moduleName
+ if wasVirtual
+ @@logger\log msgs.refreshRecord.unsetVirtual, .moduleName and "module" or "macro", .name
+ else
+ @@logger\log msgs.refreshRecord.otherUpdate, .moduleName and "module" or "macro", .name, \getVersionString!
+
+class Updater extends UpdaterBase
+ DependencyControl = nil
+ msgs = {
+ getLock: {
+ orphaned: "Ignoring orphaned in-progress update started by %s."
+ waitFinished: "Waited %d seconds."
+ abortWait: "Timeout reached after %d seconds."
+ waiting: "Waiting for update initiated by %s to finish..."
+ }
+ require: {
+ macroPassed: "%s is not a module."
+ upToDate: "Tried to require an update for up-to-date module '%s'."
+ }
+ scheduleUpdate: {
+ updaterDisabled: "Skipping update check for %s (Updater disabled)."
+ runningUpdate: "Running scheduled update for %s '%s'..."
+ }
+ }
+ new: (@host = script_namespace, globalConfig, logger) =>
+ @tasks = macros: {}, modules: {}
+ super.config = globalConfig
+ super.logger = logger if logger
+
+ addTask: (record, targetVersion, addFeeds = {}, exhaustive, channel, optional) =>
+ DependencyControl or= require "l0.DependencyControl"
+ if record.__class != DependencyControl
+ depRec = {saveRecordToConfig: false, readGlobalScriptVars: false}
+ depRec[k] = v for k, v in pairs record
+ record = DependencyControl depRec
+
+ task = @tasks[record.type][record.namespace]
+ if task
+ return task\set targetVersion, addFeeds, exhaustive, channel, optional
+ else
+ task = UpdateTask record, targetVersion, addFeeds, exhaustive, channel, optional, @
+ @tasks[record.type][record.namespace] = task
+ return task, err
+
+ require: (record, ...) =>
+ @@logger\assert record.moduleName, msgs.require.macroPassed, record.name or record.namespace
+ @@logger\log "%s module '%s'...", record.virtual and "Fetching required" or "Updating outdated", record.name
+ task, code = @addTask record, ...
+ code, res = task\run true if task
+
+ if code == 0 and not task.updated
+ -- usually we know in advance if a module is up to date so there's no reason to block other updaters
+ -- but we'll make sure to handle this case gracefully, anyway
+ @@logger\debug msgs.require.upToDate, task.record.name or task.record.moduleName
+ return task.record\loadModule task.record.moduleName
+ elseif code >= 0
+ return task.ref
+ else -- pass on update errors
+ return nil, code, res
+
+ scheduleUpdate: (record) =>
+ unless @@config.c.updaterEnabled
+ @@logger\trace msgs.scheduleUpdate.updaterDisabled, record.name or record.namespace
+ return -1
+
+ -- no regular updates for non-existing or unmanaged modules
+ if record.virtual or record.unmanaged
+ return -3
+
+ -- the update interval has not yet been passed since the last update check
+ if record.config.c.lastUpdateCheck and (record.config.c.lastUpdateCheck + @@config.c.updateInterval > os.time!)
+ return false
+
+ record.config.c.lastUpdateCheck = os.time!
+ record.config\write!
+
+ task = @addTask record -- no need to check for errors, because we've already accounted for those cases
+ @@logger\trace msgs.scheduleUpdate.runningUpdate, record.moduleName and "module" or "macro", record.name
+ return task\run!
+
+
+ getLock: (doWait, waitTimeout = @@config.c.updateWaitTimeout) =>
+ return true if @hasLock
+
+ @@config\load!
+ running, didWait = @@config.c.updaterRunning
+ if running and running.host != @host
+ otherHost = running.host
+
+ if running.time + @@config.c.updateOrphanTimeout < os.time!
+ @@logger\log msgs.getLock.orphaned, running.host
+ elseif doWait
+ @@logger\log msgs.getLock.waiting, running.host
+ timeout, didWait = waitTimeout, true
+ while running and timeout > 0
+ PreciseTimer.sleep 1000
+ timeout -= 1
+ @@config\load!
+ running = @@config.c.updaterRunning
+ @@logger\log timeout <= 0 and msgs.getLock.abortWait or msgs.getLock.waitFinished,
+ waitTimeout - timeout
+
+ else return false, running.host
+
+ -- register the running update in the config file to prevent collisions
+ -- with other scripts trying to update the same modules
+
+ @@config.c.updaterRunning = host: @host, time: os.time!
+ @@config\write!
+ @hasLock = true
+
+ -- reload important module version information from configuration
+ -- because another updater instance might have updated them in the meantime
+ if didWait
+ task\refreshRecord!
for _,task in pairs @tasks.modules + + return true + + releaseLock: => + return false unless @hasLock + @hasLock = false + @@config.c.updaterRunning = false @@config\write! \ No newline at end of file
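
A note for reviewers on the error-code convention visible in the Updater hunk above: codes at or below -100 are treated as downstream component errors, and getUpdaterErrorMsg picks the component name from updaterErrorComponent via math.floor(-code/100). The following standalone MoonScript sketch is not part of the patch; the local componentNames table merely mirrors updaterErrorComponent, and the codes shown are the ones performUpdate returns for download failures.

    componentNames = {"DownloadManager (adding download)", "DownloadManager"}

    -- -140 is returned when adding a download fails, -245 when a download itself fails
    for code in *{-140, -245}
        component = componentNames[math.floor(-code / 100)]
        print "code #{code} is reported against component '#{component}'"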