commit eaff8699a44c36aea649c95677ff500e7f028b9d
parent c8b7e2352fc24dc71142b1bc4c78084b9cea202a
Author: Remy Noulin (Spartatek) <remy.noulin@spartatek.se>
Date: Thu, 1 Dec 2016 12:34:06 +0100
Add transform, transformTo, transformFrom commands
transformTo runs a command in clean filter
transformFrom runs a command in smudge filter
README.md | 8 +++
package.json | 2 +-
src/git-off | 208 ++++++++++++++++++++++++++++++++++++++++++++++++------
src/gitoff.coffee | 196 +++++++++++++++++++++++++++++++++++++++++++++-----
4 files changed, 376 insertions(+), 38 deletions(-)
Diffstat:
4 files changed, 376 insertions(+), 38 deletions(-)
diff --git a/README.md b/README.md
@@ -251,6 +251,14 @@ git off s3region [thisrepo] [region]
setup amazon s3 region for the bucket
git off s3bucket [thisrepo] [bucket]
setup amazon s3 bucket
+git off transform [thisrepo] [enable|disable]
+ enable transform in clean and smudge filters
+git off transformTo [thisrepo] ['cmd _1 _2']
+ setup transform command for clean filter
+ When the command is empty the regular transport is performed
+git off transformFrom [thisrepo] ['cmd _1 _2']
+ setup transform command for smudge filter
+ When the command is empty the regular transport is performed
git off clean
internal filter
dont use directly
diff --git a/package.json b/package.json
@@ -1,6 +1,6 @@
{
"name": "git-off",
- "version": "0.0.8",
+ "version": "0.0.9",
"description": "large file handler for git",
"bin": "./src/git-off",
"scripts": {
diff --git a/src/git-off b/src/git-off
@@ -84,7 +84,8 @@ externalHelpers = {
'listAttr': 'git check-attr -a',
'ssh': 'ssh',
'scp': 'scp',
- 'curl': 'curl'
+ 'curl': 'curl',
+ 'mv': 'mv'
};
runtimeConfig = {
@@ -103,7 +104,10 @@ runtimeConfig = {
'log': '',
'offConfigAlways': '',
's3Region': '',
- 's3Bucket': ''
+ 's3Bucket': '',
+ 'transform': '',
+ 'transformTo': '',
+ 'transformFrom': ''
};
offDEFAULTS = {
@@ -122,6 +126,9 @@ offDEFAULTS = {
'configAlways': 'offNoValue',
's3Region': 'offNoValue',
's3Bucket': 'offNoValue',
+ 'transform': 'disable',
+ 'transformTo': 'pbzip2 -9 -c _1 > _2',
+ 'transformFrom': 'pbzip2 -d -c _1 > _2',
'prePush': '#!/bin/sh\ncommand -v git-off >/dev/null 2>&1 || { echo >&2 "\\nThis repository is configured for Git off but \'git-off\' was not found on your path. If you no longer wish to use git off, remove this hook by deleting .git/hooks/pre-push.\\n"; exit 2; }\ngit off pre-push "$@"',
'offSignature': '### git-off v1 sha:',
'shaLength': 40
@@ -371,6 +378,24 @@ offHelpers = {
}
return runtimeConfig.s3Bucket;
},
+ 'transform': function() {
+ if (runtimeConfig.transform === '') {
+ runtimeConfig.transform = gitConfig.get('off.transform');
+ }
+ return runtimeConfig.transform;
+ },
+ 'transformTo': function() {
+ if (runtimeConfig.transformTo === '') {
+ runtimeConfig.transformTo = gitConfig.get('off.transformTo');
+ }
+ return runtimeConfig.transformTo;
+ },
+ 'transformFrom': function() {
+ if (runtimeConfig.transformFrom === '') {
+ runtimeConfig.transformFrom = gitConfig.get('off.transformFrom');
+ }
+ return runtimeConfig.transformFrom;
+ },
'userAt': function() {
var user;
if (offHelpers.offScpUser() !== '') {
@@ -477,10 +502,23 @@ offHelpers = {
fs.chmodSync(offHelpers.offStore() + '/' + file, '444');
};
transport['receive'] = function(file) {
- var readStream;
- if (offHelpers.checkIntegrity(offHelpers.offStore() + '/' + file)) {
- readStream = fs.createReadStream(offHelpers.offStore() + '/' + file);
- readStream.pipe(process.stdout);
+ var f_l, readStream;
+ if (offHelpers.transform() === 'enable' && offHelpers.transformFrom() !== '') {
+ f_l = file.split('/');
+ if (fs.existsSync(offHelpers.objectPath() + '/' + f_l[0] + '/' + f_l[1]) === false) {
+ mkdirParents(offHelpers.objectPath() + '/' + f_l[0] + '/' + f_l[1]);
+ }
+ copy(offHelpers.offStore() + '/' + file, offHelpers.objectPath() + '/' + file);
+ transport['transformFrom'](file);
+ if (offHelpers.checkIntegrity(offHelpers.objectPath() + '/../tmp/' + file)) {
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/../tmp/' + file);
+ readStream.pipe(process.stdout);
+ }
+ } else {
+ if (offHelpers.checkIntegrity(offHelpers.offStore() + '/' + file)) {
+ readStream = fs.createReadStream(offHelpers.offStore() + '/' + file);
+ readStream.pipe(process.stdout);
+ }
}
};
} else if (mode === 'scp') {
@@ -517,9 +555,17 @@ offHelpers = {
} else {
exec('scp', [offHelpers.offScpOptions(), pem, '-P ' + h_l[2], h_l[0] + ':' + h_l[1] + '/' + file, offHelpers.objectPath() + '/' + file]);
}
- if (offHelpers.checkIntegrity(offHelpers.objectPath() + '/' + file)) {
- readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file);
- readStream.pipe(process.stdout);
+ if (offHelpers.transform() === 'enable' && offHelpers.transformFrom() !== '') {
+ transport['transformFrom'](file);
+ if (offHelpers.checkIntegrity(offHelpers.objectPath() + '/../tmp/' + file)) {
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/../tmp/' + file);
+ readStream.pipe(process.stdout);
+ }
+ } else {
+ if (offHelpers.checkIntegrity(offHelpers.objectPath() + '/' + file)) {
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file);
+ readStream.pipe(process.stdout);
+ }
}
};
} else if (mode === 'http') {
@@ -533,9 +579,17 @@ offHelpers = {
mkdirParents(offHelpers.objectPath() + '/' + f_l[0] + '/' + f_l[1]);
}
exec('curl', [offHelpers.offCurlOptions(), offHelpers.objectPath() + '/' + file, offHelpers.offHttp() + '/' + file]);
- if (offHelpers.checkIntegrity(offHelpers.objectPath() + '/' + file)) {
- readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file);
- readStream.pipe(process.stdout);
+ if (offHelpers.transform() === 'enable' && offHelpers.transformFrom() !== '') {
+ transport['transformFrom'](file);
+ if (offHelpers.checkIntegrity(offHelpers.objectPath() + '/../tmp/' + file)) {
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/../tmp/' + file);
+ readStream.pipe(process.stdout);
+ }
+ } else {
+ if (offHelpers.checkIntegrity(offHelpers.objectPath() + '/' + file)) {
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file);
+ readStream.pipe(process.stdout);
+ }
}
};
} else if (mode === 's3') {
@@ -573,9 +627,17 @@ offHelpers = {
fileStream = fs.createWriteStream(offHelpers.objectPath() + '/' + file);
fileStream.on('finish', function() {
var readStream;
- if (offHelpers.checkIntegrity(offHelpers.objectPath() + '/' + file)) {
- readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file);
- readStream.pipe(process.stdout);
+ if (offHelpers.transform() === 'enable' && offHelpers.transformFrom() !== '') {
+ transport['transformFrom'](file);
+ if (offHelpers.checkIntegrity(offHelpers.objectPath() + '/../tmp/' + file)) {
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/../tmp/' + file);
+ readStream.pipe(process.stdout);
+ }
+ } else {
+ if (offHelpers.checkIntegrity(offHelpers.objectPath() + '/' + file)) {
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file);
+ readStream.pipe(process.stdout);
+ }
}
});
s3.getObject(dlParams, function(err, data) {}).on('httpData', function(chunk) {
@@ -598,7 +660,19 @@ offHelpers = {
transport = {
'send': function(src) {},
- 'receive': function(src) {}
+ 'receive': function(src) {},
+ 'transformFrom': function(file) {
+ var cmd, f_l, offFile, offFilePath, r;
+ f_l = file.split('/');
+ offFile = f_l[f_l.length - 1];
+ offFilePath = offHelpers.getOffFilePath(offFile);
+ if (fs.existsSync(offHelpers.objectPath() + '/../tmp/' + offFilePath[1]) === false) {
+ mkdirParents(offHelpers.objectPath() + '/../tmp/' + offFilePath[1]);
+ }
+ cmd = offHelpers.transformFrom();
+ cmd = cmd.replace('_1', offHelpers.objectPath() + '/' + file).replace('_2', offHelpers.objectPath() + '/../tmp/' + file);
+ r = syncexec(cmd);
+ }
};
offCommands = {
@@ -672,6 +746,15 @@ offCommands = {
if (offHelpers.s3Bucket() === '' || setCfg !== gitConfig.set) {
setCfg('off.s3Bucket', offDEFAULTS.s3Bucket);
}
+ if (offHelpers.transform() === '' || setCfg !== gitConfig.set) {
+ setCfg('off.transform', offDEFAULTS.transform);
+ }
+ if (offHelpers.transformTo() === '' || setCfg !== gitConfig.set) {
+ setCfg('off.transformTo', offDEFAULTS.transformTo);
+ }
+ if (offHelpers.transformFrom() === '' || setCfg !== gitConfig.set) {
+ setCfg('off.transformFrom', offDEFAULTS.transformFrom);
+ }
if (runtimeConfig.offMode === 'copy') {
mkdirParents(runtimeConfig.offStore);
}
@@ -825,8 +908,38 @@ offCommands = {
console.log('off.s3bucket '.blue.bold + offHelpers.s3Bucket());
}
},
+ 'transform': function(setCfg) {
+ var len, setting;
+ len = process.argv.length;
+ setting = process.argv[len - 1];
+ if (setting !== 'transform' && setting !== 'thisrepo') {
+ setCfg('off.transform', setting);
+ } else {
+ console.log('off.transform '.blue.bold + offHelpers.transform());
+ }
+ },
+ 'transformTo': function(setCfg) {
+ var len, setting;
+ len = process.argv.length;
+ setting = process.argv[len - 1];
+ if (setting !== 'transformTo' && setting !== 'thisrepo') {
+ setCfg('off.transformTo', setting);
+ } else {
+ console.log('off.transformTo '.blue.bold + offHelpers.transformTo());
+ }
+ },
+ 'transformFrom': function(setCfg) {
+ var len, setting;
+ len = process.argv.length;
+ setting = process.argv[len - 1];
+ if (setting !== 'transformFrom' && setting !== 'thisrepo') {
+ setCfg('off.transformFrom', setting);
+ } else {
+ console.log('off.transformFrom '.blue.bold + offHelpers.transformFrom());
+ }
+ },
'clean': function() {
- var file, offFile, offFilePath, r, size, writeStream;
+ var file, offFile, offFilePath, pipe, r, size, writeStream;
offCommands.localSetup();
file = process.argv[3];
size = fs.statSync(file).size;
@@ -839,8 +952,25 @@ offCommands = {
offFilePath = offFilePath[0];
if (fs.existsSync(offHelpers.objectPath() + '/' + offFilePath) === false) {
writeStream = fs.createWriteStream(offHelpers.objectPath() + '/' + offFilePath);
- process.stdin.pipe(writeStream);
- fs.chmodSync(offHelpers.objectPath() + '/' + offFilePath, '444');
+ pipe = process.stdin.pipe(writeStream);
+ pipe.on('finish', function() {
+ var cmd;
+ fs.chmodSync(offHelpers.objectPath() + '/' + offFilePath, '444');
+ if (offHelpers.transform() === 'enable' && offHelpers.transformTo() !== '') {
+ if (fs.existsSync(offHelpers.objectPath() + '/../tmp') === false) {
+ mkdirParents(offHelpers.objectPath() + '/../tmp');
+ }
+ offFilePath = offHelpers.getOffFilePath(offFile);
+ if (fs.existsSync(offHelpers.objectPath() + '/../tmp/' + offFilePath[1]) === false) {
+ mkdirParents(offHelpers.objectPath() + '/../tmp/' + offFilePath[1]);
+ }
+ offFilePath = offFilePath[0];
+ exec('mv', [offHelpers.objectPath() + '/' + offFilePath, offHelpers.objectPath() + '/../tmp/' + offFilePath]);
+ cmd = offHelpers.transformTo();
+ cmd = cmd.replace('_1', offHelpers.objectPath() + '/../tmp/' + offFilePath).replace('_2', offHelpers.objectPath() + '/' + offFilePath);
+ r = syncexec(cmd);
+ }
+ });
} else {
writeStream = fs.createWriteStream('/dev/null');
process.stdin.pipe(writeStream);
@@ -898,7 +1028,7 @@ offCommands = {
offCommands.localSetup();
status = 'header';
process.stdin.on('readable', function() {
- var data, decoder, header, offFile, offFilePath, readStream;
+ var cmd, data, decoder, header, offFile, offFilePath, r, readStream;
if (status === 'header') {
status = 'stream';
data = process.stdin.read(offDEFAULTS.offSignature.length + offDEFAULTS.shaLength);
@@ -912,7 +1042,22 @@ offCommands = {
offFile = header.split(':')[1];
offFilePath = offHelpers.getOffFilePath(offFile)[0];
if (fs.existsSync(offHelpers.objectPath() + '/' + offFilePath) === true) {
- readStream = fs.createReadStream(offHelpers.objectPath() + '/' + offFilePath);
+ if (offHelpers.transform() === 'enable' && offHelpers.transformFrom() !== '') {
+ if (fs.existsSync(offHelpers.objectPath() + '/../tmp') === false) {
+ mkdirParents(offHelpers.objectPath() + '/../tmp');
+ }
+ offFilePath = offHelpers.getOffFilePath(offFile);
+ if (fs.existsSync(offHelpers.objectPath() + '/../tmp/' + offFilePath[1]) === false) {
+ mkdirParents(offHelpers.objectPath() + '/../tmp/' + offFilePath[1]);
+ }
+ offFilePath = offFilePath[0];
+ cmd = offHelpers.transformFrom();
+ cmd = cmd.replace('_1', offHelpers.objectPath() + '/' + offFilePath).replace('_2', offHelpers.objectPath() + '/../tmp/' + offFilePath);
+ r = syncexec(cmd);
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/../tmp/' + offFilePath);
+ } else {
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/' + offFilePath);
+ }
readStream.pipe(process.stdout);
} else {
transport.receive(offFilePath);
@@ -971,6 +1116,9 @@ offCommands = {
console.log('off.configAlways '.blue.bold + offHelpers.offConfigAlways());
console.log('off.s3region '.blue.bold + offHelpers.s3Region());
console.log('off.s3bucket '.blue.bold + offHelpers.s3Bucket());
+ console.log('off.transform '.blue.bold + offHelpers.transform());
+ console.log('off.transformTo '.blue.bold + offHelpers.transformTo());
+ console.log('off.transformFrom '.blue.bold + offHelpers.transformFrom());
},
'help': function() {
var c, i, len1, ref;
@@ -1120,6 +1268,24 @@ COMMAND_MAP = {
},
h: 'git off s3bucket [thisrepo] [bucket]\n setup amazon s3 bucket'
},
+ 'transform': {
+ f: function() {
+ thisrepo(offCommands['transform']);
+ },
+ h: 'git off transform [thisrepo] [enable|disable]\n enable transform in clean and smudge filters'
+ },
+ 'transformTo': {
+ f: function() {
+ thisrepo(offCommands['transformTo']);
+ },
h: "git off transformTo [thisrepo] ['cmd _1 _2']\n setup transform command for clean filter\n When the command is empty the regular transport is performed"
+ },
+ 'transformFrom': {
+ f: function() {
+ thisrepo(offCommands['transformFrom']);
+ },
+ h: "git off transformFrom [thisrepo] ['cmd _1 _2']\n setup transform command for smudge filter\n When the command is empty the regular transport is performed"
+ },
'clean': {
f: function() {
offCommands.clean();
diff --git a/src/gitoff.coffee b/src/gitoff.coffee
@@ -107,6 +107,7 @@ externalHelpers =
'ssh': 'ssh'
'scp': 'scp'
'curl': 'curl'
+ 'mv': 'mv'
# config built by offHelpers
# use offHelpers to access runtimeConfig
@@ -127,6 +128,9 @@ runtimeConfig =
'offConfigAlways': ''
's3Region': ''
's3Bucket': ''
+ 'transform': ''
+ 'transformTo': ''
+ 'transformFrom': ''
# default configuration for first time install
# objectPath is git off cache in repo
@@ -146,6 +150,9 @@ offDEFAULTS =
'configAlways': 'offNoValue'
's3Region': 'offNoValue'
's3Bucket': 'offNoValue'
+ 'transform': 'disable'
+ 'transformTo': 'pbzip2 -9 -c _1 > _2'
+ 'transformFrom': 'pbzip2 -d -c _1 > _2'
'prePush': '#!/bin/sh\ncommand -v git-off >/dev/null 2>&1 || { echo >&2 "\\nThis repository is configured for Git off but \'git-off\' was not found on your path. If you no longer wish to use git off, remove this hook by deleting .git/hooks/pre-push.\\n"; exit 2; }\ngit off pre-push "$@"'
'offSignature': '### git-off v1 sha:'
'shaLength': 40
@@ -390,6 +397,21 @@ offHelpers =
runtimeConfig.s3Bucket = gitConfig.get 'off.s3bucket'
runtimeConfig.s3Bucket
+ 'transform': ->
+ if runtimeConfig.transform == ''
+ runtimeConfig.transform = gitConfig.get 'off.transform'
+ runtimeConfig.transform
+
+ 'transformTo': ->
+ if runtimeConfig.transformTo == ''
+ runtimeConfig.transformTo = gitConfig.get 'off.transformTo'
+ runtimeConfig.transformTo
+
+ 'transformFrom': ->
+ if runtimeConfig.transformFrom == ''
+ runtimeConfig.transformFrom = gitConfig.get 'off.transformFrom'
+ runtimeConfig.transformFrom
+
'userAt': ->
if offHelpers.offScpUser() != ''
user = offHelpers.offScpUser() + '@'
@@ -519,9 +541,22 @@ offHelpers =
fs.chmodSync offHelpers.offStore() + '/' + file, '444'
return
transport['receive'] = (file) ->
- if offHelpers.checkIntegrity offHelpers.offStore() + '/' + file
- readStream = fs.createReadStream(offHelpers.offStore() + '/' + file)
- readStream.pipe(process.stdout)
+ # transfer and transform
+ if offHelpers.transform() == 'enable' and offHelpers.transformFrom() != ''
+ # create file directories in objectPath
+ f_l = file.split '/'
+ if fs.existsSync(offHelpers.objectPath() + '/' + f_l[0] + '/' + f_l[1]) == false
+ mkdirParents offHelpers.objectPath() + '/' + f_l[0] + '/' + f_l[1]
+
+ copy offHelpers.offStore() + '/' + file, offHelpers.objectPath() + '/' + file
+ transport['transformFrom'](file)
+ if offHelpers.checkIntegrity offHelpers.objectPath() + '/../tmp/' + file
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/../tmp/' + file)
+ readStream.pipe(process.stdout)
+ else
+ if offHelpers.checkIntegrity offHelpers.offStore() + '/' + file
+ readStream = fs.createReadStream(offHelpers.offStore() + '/' + file)
+ readStream.pipe(process.stdout)
return
# scp mode
@@ -562,9 +597,16 @@ offHelpers =
else
exec 'scp', [offHelpers.offScpOptions(), pem, '-P ' + h_l[2], h_l[0] + ':' + h_l[1] + '/' + file, offHelpers.objectPath() + '/' + file]
- if offHelpers.checkIntegrity offHelpers.objectPath() + '/' + file
- readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file)
- readStream.pipe(process.stdout)
+ # transform
+ if offHelpers.transform() == 'enable' and offHelpers.transformFrom() != ''
+ transport['transformFrom'](file)
+ if offHelpers.checkIntegrity offHelpers.objectPath() + '/../tmp/' + file
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/../tmp/' + file)
+ readStream.pipe(process.stdout)
+ else
+ if offHelpers.checkIntegrity offHelpers.objectPath() + '/' + file
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file)
+ readStream.pipe(process.stdout)
return
# http mode
@@ -580,9 +622,16 @@ offHelpers =
exec 'curl', [offHelpers.offCurlOptions(), offHelpers.objectPath() + '/' + file, offHelpers.offHttp() + '/' + file]
- if offHelpers.checkIntegrity offHelpers.objectPath() + '/' + file
- readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file)
- readStream.pipe(process.stdout)
+ # transform
+ if offHelpers.transform() == 'enable' and offHelpers.transformFrom() != ''
+ transport['transformFrom'](file)
+ if offHelpers.checkIntegrity offHelpers.objectPath() + '/../tmp/' + file
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/../tmp/' + file)
+ readStream.pipe(process.stdout)
+ else
+ if offHelpers.checkIntegrity offHelpers.objectPath() + '/' + file
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file)
+ readStream.pipe(process.stdout)
return
# s3 mode
@@ -615,9 +664,16 @@ offHelpers =
fileStream = fs.createWriteStream(offHelpers.objectPath() + '/' + file)
fileStream.on('finish', ->
# transfer is finished, the complete file is in the cache
- if offHelpers.checkIntegrity offHelpers.objectPath() + '/' + file
- readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file)
- readStream.pipe(process.stdout)
+ # transform
+ if offHelpers.transform() == 'enable' and offHelpers.transformFrom() != ''
+ transport['transformFrom'](file)
+ if offHelpers.checkIntegrity offHelpers.objectPath() + '/../tmp/' + file
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/../tmp/' + file)
+ readStream.pipe(process.stdout)
+ else
+ if offHelpers.checkIntegrity offHelpers.objectPath() + '/' + file
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/' + file)
+ readStream.pipe(process.stdout)
return
)
s3.getObject(dlParams, (err, data) ->
@@ -651,6 +707,20 @@ transport =
# send data to stdout
# to be initialized by setTransport
return
+ 'transformFrom': (file) ->
+ # transform file in objectPath to objectPath/../tmp/file
+ # create directories in tmp
+ f_l = file.split('/')
+ offFile = f_l[f_l.length-1]
+ offFilePath = offHelpers.getOffFilePath(offFile)
+ if fs.existsSync(offHelpers.objectPath() + '/../tmp/' + offFilePath[1]) == false
+ # create the file directory
+ mkdirParents offHelpers.objectPath() + '/../tmp/' + offFilePath[1]
+
+ cmd = offHelpers.transformFrom()
+ cmd = cmd.replace('_1', offHelpers.objectPath() + '/' + file).replace('_2', offHelpers.objectPath() + '/../tmp/' + file)
+ r = syncexec cmd
+ return
# command line functions
@@ -733,6 +803,12 @@ offCommands =
setCfg('off.s3Region', offDEFAULTS.s3Region)
if offHelpers.s3Bucket() == '' or setCfg != gitConfig.set
setCfg('off.s3Bucket', offDEFAULTS.s3Bucket)
+ if offHelpers.transform() == '' or setCfg != gitConfig.set
+ setCfg('off.transform', offDEFAULTS.transform)
+ if offHelpers.transformTo() == '' or setCfg != gitConfig.set
+ setCfg('off.transformTo', offDEFAULTS.transformTo)
+ if offHelpers.transformFrom() == '' or setCfg != gitConfig.set
+ setCfg('off.transformFrom', offDEFAULTS.transformFrom)
# create off.store
@@ -876,6 +952,33 @@ offCommands =
console.log 'off.s3bucket '.blue.bold + offHelpers.s3Bucket()
return
+ 'transform': (setCfg) ->
+ len = process.argv.length
+ setting = process.argv[len-1]
+ if setting != 'transform' and setting != 'thisrepo'
+ setCfg('off.transform', setting)
+ else
+ console.log 'off.transform '.blue.bold + offHelpers.transform()
+ return
+
+ 'transformTo': (setCfg) ->
+ len = process.argv.length
+ setting = process.argv[len-1]
+ if setting != 'transformTo' and setting != 'thisrepo'
+ setCfg('off.transformTo', setting)
+ else
+ console.log 'off.transformTo '.blue.bold + offHelpers.transformTo()
+ return
+
+ 'transformFrom': (setCfg) ->
+ len = process.argv.length
+ setting = process.argv[len-1]
+ if setting != 'transformFrom' and setting != 'thisrepo'
+ setCfg('off.transformFrom', setting)
+ else
+ console.log 'off.transformFrom '.blue.bold + offHelpers.transformFrom()
+ return
+
'clean': ->
# replace files handled by git off with reference
# stdin is data from the working directory file
@@ -883,6 +986,7 @@ offCommands =
# create local setup in case the repo is freshly cloned
# create file information (size, sha = git off filename)
# copy stdin to git off cache in current git
+ # transform input file
# print git off ref to stdout for git
# create local setup in case the repo is freshly cloned
@@ -905,9 +1009,31 @@ offCommands =
if fs.existsSync(offHelpers.objectPath() + '/' + offFilePath) == false
# stream stdin to sha in git off cache
writeStream = fs.createWriteStream(offHelpers.objectPath() + '/' + offFilePath)
- process.stdin.pipe(writeStream)
- # all objects in cache are read-only
- fs.chmodSync offHelpers.objectPath() + '/' + offFilePath, '444'
+ pipe = process.stdin.pipe(writeStream)
+ pipe.on('finish', ->
+ # all objects in cache are read-only
+ fs.chmodSync offHelpers.objectPath() + '/' + offFilePath, '444'
+
+ # transform input file
+ if offHelpers.transform() == 'enable' and offHelpers.transformTo() != ''
+ if fs.existsSync(offHelpers.objectPath() + '/../tmp') == false
+ mkdirParents offHelpers.objectPath() + '/../tmp'
+
+ # create directories in tmp
+ offFilePath = offHelpers.getOffFilePath(offFile)
+ if fs.existsSync(offHelpers.objectPath() + '/../tmp/' + offFilePath[1]) == false
+ # create the file directory
+ mkdirParents offHelpers.objectPath() + '/../tmp/' + offFilePath[1]
+ offFilePath = offFilePath[0]
+
+ # original file is moved to tmp
+ # transformed file is stored in objectPath under the same name
+ exec 'mv', [offHelpers.objectPath() + '/' + offFilePath, offHelpers.objectPath() + '/../tmp/' + offFilePath]
+ cmd = offHelpers.transformTo()
+ cmd = cmd.replace('_1', offHelpers.objectPath() + '/../tmp/' + offFilePath).replace('_2', offHelpers.objectPath() + '/' + offFilePath)
+ r = syncexec cmd
+ return
+ )
else
# file already exists, discard data
writeStream = fs.createWriteStream('/dev/null')
@@ -1033,7 +1159,24 @@ offCommands =
# detect if file is already in cache
if fs.existsSync(offHelpers.objectPath() + '/' + offFilePath) == true
# copy from cache
- readStream = fs.createReadStream(offHelpers.objectPath() + '/' + offFilePath)
+ # transform file
+ if offHelpers.transform() == 'enable' and offHelpers.transformFrom() != ''
+ if fs.existsSync(offHelpers.objectPath() + '/../tmp') == false
+ mkdirParents offHelpers.objectPath() + '/../tmp'
+
+ # create directories in tmp
+ offFilePath = offHelpers.getOffFilePath(offFile)
+ if fs.existsSync(offHelpers.objectPath() + '/../tmp/' + offFilePath[1]) == false
+ # create the file directory
+ mkdirParents offHelpers.objectPath() + '/../tmp/' + offFilePath[1]
+ offFilePath = offFilePath[0]
+
+ cmd = offHelpers.transformFrom()
+ cmd = cmd.replace('_1', offHelpers.objectPath() + '/' + offFilePath).replace('_2', offHelpers.objectPath() + '/../tmp/' + offFilePath)
+ r = syncexec cmd
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/../tmp/' + offFilePath)
+ else
+ readStream = fs.createReadStream(offHelpers.objectPath() + '/' + offFilePath)
readStream.pipe(process.stdout)
else
# copy from off.store
@@ -1097,6 +1240,9 @@ offCommands =
console.log 'off.configAlways '.blue.bold + offHelpers.offConfigAlways()
console.log 'off.s3region '.blue.bold + offHelpers.s3Region()
console.log 'off.s3bucket '.blue.bold + offHelpers.s3Bucket()
+ console.log 'off.transform '.blue.bold + offHelpers.transform()
+ console.log 'off.transformTo '.blue.bold + offHelpers.transformTo()
+ console.log 'off.transformFrom '.blue.bold + offHelpers.transformFrom()
return
'help': ->
@@ -1245,6 +1391,24 @@ COMMAND_MAP =
return
h: 'git off s3bucket [thisrepo] [bucket]\n setup amazon s3 bucket'
+ 'transform':
+ f: ->
+ thisrepo offCommands['transform']
+ return
+ h: 'git off transform [thisrepo] [enable|disable]\n enable transform in clean and smudge filters'
+
+ 'transformTo':
+ f: ->
+ thisrepo offCommands['transformTo']
+ return
+ h: "git off transformTo [thisrepo] ['cmd _1 _2']\n setup transform command for clean filter\n When the command is empty the regular transport is performed"
+
+ 'transformFrom':
+ f: ->
+ thisrepo offCommands['transformFrom']
+ return
+ h: "git off transformFrom [thisrepo] ['cmd _1 _2']\n setup transform command for smudge filter\n When the command is empty the regular transport is performed"
+
'clean':
f: ->
offCommands.clean()