diff --git a/.astylerc b/.astylerc new file mode 100644 index 000000000..ef70741d4 --- /dev/null +++ b/.astylerc @@ -0,0 +1,6 @@ +--style=linux +--indent=spaces=2 +--attach-namespaces +--attach-classes +--attach-inlines +--attach-extern-c diff --git a/.gitignore b/.gitignore index 5188a2e6a..64ce99175 100644 --- a/.gitignore +++ b/.gitignore @@ -5,10 +5,11 @@ /build/ /test/coverage/ /test/repos/ -/test/test/repos/ +/test/test/ /src/ /include/ /lib/enums.js +/lib/nodegit.js /vendor/Release @@ -16,7 +17,10 @@ /vendor/*.filters /vendor/*.sln -/generate/idefs.json +/generate/output +/generate/**/*.json +!/generate/input/*.json + /generate/missing-tests.json /binding.gyp diff --git a/.npmignore b/.npmignore index 5b5b9304e..db6d57ba3 100644 --- a/.npmignore +++ b/.npmignore @@ -1,4 +1,22 @@ -/doc/ +/build/ /example/ -/node_modules/ +/generate/ /test/ +/vendor/libgit2/ +/vendor/libssh2/ +/vendor/http_parser/ +/vendor/Release/ + +.astylerc +.editorconfig +.gitignore +.gitmodules +.jshintrc +.travis.yml +.appveyor.yml + +*.vcxproj +*.filters +*.sln +*.log +*.md diff --git a/.travis.yml b/.travis.yml index 8a78fce67..f1390db23 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,6 +23,8 @@ before_script: - chmod 600 ~/.ssh/id_rsa* - eval `ssh-agent -s` - ssh-add ~/.ssh/id_rsa + - git config --global user.name "John Doe" + - git config --global user.email johndoe@example.com git: depth: 1 branches: @@ -35,3 +37,9 @@ script: npm test notifications: slack: secure: KglNSqZiid9YudCwkPFDh+sZfW5BwFlM70y67E4peHwwlbbV1sSBPHcs74ZHP/lqgEZ4hMv4N2NI58oYFD5/1a+tKIQP1TkdIMuq4j2LXheuirA2HDcydOVrsC8kRx5XFGKdVRg/uyX2dlRHcOWFhxrS6yc6IxtxYWlRTD2SmEc= + webhooks: + urls: + - https://webhooks.gitter.im/e/cbafdb27ad32ba746a73 + on_success: always # options: [always|never|change] default: always + on_failure: always # options: [always|never|change] default: always + on_start: false # default: false diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b8f6b7ce1..52a55d148 100644 --- a/CONTRIBUTING.md 
+++ b/CONTRIBUTING.md @@ -1,81 +1,27 @@ -Test Contribution Guidelines ----------------------------- +Contribution Guidelines +----------------------- -This is a guide on how to contribute test cases to help with coverage testing for NodeGit. +### A Note on Issues and Support ## -## Getting Started ## +We try to be available pretty often to help when problems come up. We like to split incoming questions +into two categories: potential bugs/features, and questions. If you want a feature added, or think you've found a bug +in the code (or in the examples), search the [issue tracker](https://github.com/nodegit/nodegit/issues) and if you don't +find anything, file a new issue. If you just have questions, instead of using issues, contact us in our [Gitter room](https://gitter.im/nodegit/nodegit). -Currently there are a number of fields and functions in NodeGit that have no tests at all. A list of which files are missing and what fields and functions need tests can be generated by running +## How to Help ## -``` bash -npm run missing-tests -``` +NodeGit is iterating pretty quickly, but it can always go faster. We welcome help with the deeper darker parts, +like the templates and binding and more, but there are plenty of smaller things to do as well. +Things that are always needed: + - Filing issues (see above). + - Writing tests (See [here](https://github.com/nodegit/nodegit/blob/master/TESTING.md)). + - Writing examples. -This will make the file `generate/missing-tests.json` which will contain info for tests or files that are currently missing. +These are all good easy ways to start getting involved with the project. You can also look through the issue tracker +and see if you can help with any existing issues. Please comment with your intention and any questions before getting +started; duplicating work or doing something that would be rejected always sucks. -From this file you can find fields and functions that don't have any tests yet and pick one to work on. 
+Additionally, [the documentation](http://www.nodegit.org) needs some love. Get in touch with one of us on Gitter if +you'd like to lend a hand with that. -## Adding a test ## - -After you find a test that's missing the next step is to find or add the file that you need to add it into. You can always use other tests in the directory as a guide for writing more. All new files will be automatically added during a test run. - -In the `missing-tests.json` file you'll see it formatted like so: - -```json -{ - "{className}":{ - "fields": [], - "functions": [] - } -} -``` - -In the file each `{className}` corresponds to a file found at `test/tests/{classname}`. Each entry in either `fields` or `functions` is a missing test for the respective field/function. - -In the file that your test is going in you can just append it to the file inside of the `describe` function block. - -It can be helpful to reference the [libgit2 API docs](https://libgit2.github.com/libgit2/#v0.21.2) to know what the field or function is doing inside of libgit2 and referencing the [NodeGit API docs](http://www.nodegit.org/) can also help. Looking at examples inside of `/example` can show you how we wrap the libgit2 library and how you can call into it from JavaScript. - -The idea is to test the basic functionality of the field/function and to confirm that it's returning or setting the value(s) correctly. Bugs inside of libgit2 will have to either have a work-around or be ignored. - -If a specific field or function is further wrapped via a file inside of `/lib` then as long as that wrapper is called and tested. - -You can mark something to be ignored inside of the `/generate/missing-tests-ignore.json` file. - -After you write your test make sure to run `npm run missing-tests` again to confirm that the field/function that a test was written for no longer shows up. - -## Test results ## - -### The test passes ### - -Excellent!! 
Make sure that the test is working correctly and testing what you're expecting it to test and then move onto the [next section](https://github.com/nodegit/nodegit/tree/master/test#making-a-pull-request). - -### The test fails ### - -This is also great! You just found something that wasn't properly covered in our generate scripts for wrapping libgit2. We'll have to further analyze what's going on and figure out how to fix it. - -For bonus points you could also include a fix in your pull request but that step is optional. - -## Making a pull request ## - -So you made your self a new test for NodeGit and now you want to add it to the main repo? That's great! We'll try and make the process as simple and easy as possible for you. - -So assuming that you have a fork of the repo make a new branch that's labeled `new-tests-{className}` where {className} is the name of the file you added the tests to. Also, make sure you check the [main repo's pull request list](https://github.com/nodegit/nodegit/pulls) and see if somebody else is editing that file before you make your PR. They might have added a test already that's waiting to get merged in. - -So after you have your branch and your change is ready to go make sure your subjects for your commits contain the {className} of the tests you added and then list each new field/function being tested inside of the subject of the commit message. - -Example: - -``` -Added tests for oid - -fromString -allocfmt -inspect -``` - -This will help us know what each commit contains at a glance and should expedite merging your pull request. - -If your test is failing, TravisCI should pick it up and note it on the PR. PR's that add failing tests will have to be handled on a case-by-case basis but please don't let that stop you from staring a PR. - -Please don't start a PR until you're finished (no WIP test PRs please!). +For anything else, Gitter is probably the best way to get in touch as well. Happy coding, merge you soon! 
diff --git a/README.md b/README.md index 8cec4a48b..8a8141f7b 100644 --- a/README.md +++ b/README.md @@ -6,8 +6,9 @@ NodeGit [![Build Status](https://travis-ci.org/nodegit/nodegit.png)](https://travis-ci.org/nodegit/nodegit) Build Status: Windows +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/nodegit/nodegit?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -**Stable: 0.2.0** +**Stable: 0.2.4** Maintained by Tim Branyen [@tbranyen](http://twitter.com/tbranyen), Michael Robinson [@codeofinterest](http://twitter.com/codeofinterest), John Haley [@johnhaley81](http://twitter.com/johnhaley81), Max Korp [@maxkorp](http://twitter.com/MaximilianoKorp), and Nick Kallen [@nk](http://twitter.com/nk) with help from [awesome contributors](https://github.com/nodegit/nodegit/contributors)! @@ -185,3 +186,12 @@ https://github.com/nodegit/nodegit/compare/refs/tags/0.1.4...0.2.0 This update is wholly and entirely a breaking one, and older versions won't be maintained. For the purpose of migration, perhaps the biggest point to make is that async methods can now use promises, rather than just taking callbacks. Additionally, lots of method and property names have changed. + +## Node-Webkit ## + +A common issue is with nodegit not functioning properly inside of +[node-webkit](http://github.com/rogerwang/node-webkit) applications. Because nodegit +is a native module, it has to be rebuilt for node-webkit using +[nw-gyp](http://github.com/rogerwang/nw-gyp). By default, nodegit will look in the root package's package.json for an `engines` property, and within look for a `node-webkit` property that holds a specific version of node-webkit. The value of this property is what will get passed as the `--target` argument to `nw-gyp configure`. + +Currently, support for node-webkit is limited, although we intend to support it better in the future. 
diff --git a/TESTING.md b/TESTING.md new file mode 100644 index 000000000..d7a981aad --- /dev/null +++ b/TESTING.md @@ -0,0 +1,81 @@ +Test Contribution Guidelines +---------------------------- + +This is a guide on how to contribute test cases to help with coverage testing for NodeGit. + +## Getting Started ## + +Currently there are a number of fields and functions in NodeGit that have no tests at all. A list of which files are missing and what fields and functions need tests can be generated by running + +``` bash +npm run generateMissingTests +``` + +This will make the file `generate/output/missing-tests.json` which will contain info for tests or files that are currently missing. + +From this file you can find fields and functions that don't have any tests yet and pick one to work on. + +## Adding a test ## + +After you find a test that's missing the next step is to find or add the file that you need to add it into. You can always use other tests in the directory as a guide for writing more. All new files will be automatically added during a test run. + +In the `missing-tests.json` file you'll see it formatted like so: + +```json +{ + "{className}":{ + "fields": [], + "functions": [] + } +} +``` + +In the file each `{className}` corresponds to a file found at `test/tests/{classname}`. Each entry in either `fields` or `functions` is a missing test for the respective field/function. + +In the file that your test is going in you can just append it to the file inside of the `describe` function block. + +It can be helpful to reference the [libgit2 API docs](https://libgit2.github.com/libgit2/#v0.21.2) to know what the field or function is doing inside of libgit2 and referencing the [NodeGit API docs](http://www.nodegit.org/) can also help. Looking at examples inside of `/example` can show you how we wrap the libgit2 library and how you can call into it from JavaScript. 
+ +The idea is to test the basic functionality of the field/function and to confirm that it's returning or setting the value(s) correctly. Bugs inside of libgit2 will have to either have a work-around or be ignored. + +If a specific field or function is further wrapped via a file inside of `/lib` then as long as that wrapper is called and tested. + +You can mark something to be ignored inside of the `/generate/missing-tests-ignore.json` file. + +After you write your test make sure to run `npm run generateMissingTests` again to confirm that the field/function that a test was written for no longer shows up. + +## Test results ## + +### The test passes ### + +Excellent!! Make sure that the test is working correctly and testing what you're expecting it to test and then move onto the [next section](https://github.com/nodegit/nodegit/tree/master/test#making-a-pull-request). + +### The test fails ### + +This is also great! You just found something that wasn't properly covered in our generate scripts for wrapping libgit2. We'll have to further analyze what's going on and figure out how to fix it. + +For bonus points you could also include a fix in your pull request but that step is optional. + +## Making a pull request ## + +So you made yourself a new test for NodeGit and now you want to add it to the main repo? That's great! We'll try and make the process as simple and easy as possible for you. + +So assuming that you have a fork of the repo make a new branch that's labeled `new-tests-{className}` where {className} is the name of the file you added the tests to. Also, make sure you check the [main repo's pull request list](https://github.com/nodegit/nodegit/pulls) and see if somebody else is editing that file before you make your PR. They might have added a test already that's waiting to get merged in. 
+ +So after you have your branch and your change is ready to go make sure your subjects for your commits contain the {className} of the tests you added and then list each new field/function being tested inside of the subject of the commit message. + +Example: + +``` +Added tests for oid + +fromString +allocfmt +inspect +``` + +This will help us know what each commit contains at a glance and should expedite merging your pull request. + +If your test is failing, TravisCI should pick it up and note it on the PR. PRs that add failing tests will have to be handled on a case-by-case basis but please don't let that stop you from starting a PR. + +Please don't start a PR until you're finished (no WIP test PRs please!). diff --git a/appveyor.yml b/appveyor.yml index 47e35efe3..23f3768a6 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -14,6 +14,8 @@ clone_folder: c:\projects\nodegit # fix lineendings in Windows init: - git config --global core.autocrlf input + - git config --global user.name "John Doe" + - git config --global user.email johndoe@example.com # what combinations to test environment: @@ -39,3 +41,13 @@ test_script: - cmd: npm test build: off + +notifications: + - provider: Slack + auth_token: + secure: ZsaMCvRMfDZhNsiUvZtvszXXF3z4pLIGJmAj5MuDaa40JvmMC6wnBWIR+LHJuJPM + channel: nodegit + +branches: + only: + - master diff --git a/example/create-new-repo.js b/example/create-new-repo.js index c934476d9..91be1ea54 100644 --- a/example/create-new-repo.js +++ b/example/create-new-repo.js @@ -14,37 +14,29 @@ var index; fse.ensureDir(path.resolve(__dirname, repoDir)) .then(function() { - console.log('a'); return nodegit.Repository.init(path.resolve(__dirname, repoDir), 0); }) .then(function(repo) { - console.log('b'); repository = repo; return fse.writeFile(path.join(repository.workdir(), fileName), fileContent); }) .then(function(){ - console.log('c'); return repository.openIndex(); }) .then(function(idx) { - console.log('d'); index = idx; return index.read(1); }) 
.then(function() { - console.log('e'); return index.addByPath(fileName); }) .then(function() { - console.log('f'); return index.write(); }) .then(function() { - console.log('g'); return index.writeTree(); }) .then(function(oid) { - console.log('j'); var author = nodegit.Signature.create("Scott Chacon", "schacon@gmail.com", 123456789, 60); var committer = nodegit.Signature.create("Scott A Chacon", "scott@github.com", 987654321, 90); diff --git a/example/details-for-tree-entry.js b/example/details-for-tree-entry.js index c17c6d065..2197fed6f 100644 --- a/example/details-for-tree-entry.js +++ b/example/details-for-tree-entry.js @@ -7,8 +7,7 @@ var path = require('path'); nodegit.Repository.open(path.resolve(__dirname, '../.git')) .then(function(repo) { - return repo.getTree( - nodegit.Oid.fromString("e1b0c7ea57bfc5e30ec279402a98168a27838ac9")) + return repo.getTree("e1b0c7ea57bfc5e30ec279402a98168a27838ac9") .then(function(tree) { var treeEntry = tree.entryByIndex(0); diff --git a/example/fetch.js b/example/fetch.js index 927a2b8db..29bf0a454 100644 --- a/example/fetch.js +++ b/example/fetch.js @@ -2,11 +2,11 @@ var nodegit = require('../'); var path = require('path'); nodegit.Repository.open(path.resolve(__dirname, '../.git')).then(function(repo) { - return nodegit.Remote.load(repo, "origin"); -}).then(function(remote) { - remote.connect(0); - return remote.download(); -}) -.done(function() { + return repo.fetch("origin", { + credentials: function(url, userName) { + return nodegit.Cred.sshKeyFromAgent(userName); + } + }); +}).done(function() { console.log("It worked!"); }); diff --git a/generate/filters/and.js b/generate/combyne/filters/and.js similarity index 100% rename from generate/filters/and.js rename to generate/combyne/filters/and.js diff --git a/generate/filters/args_info.js b/generate/combyne/filters/args_info.js similarity index 100% rename from generate/filters/args_info.js rename to generate/combyne/filters/args_info.js diff --git 
a/generate/filters/cpp_to_v8.js b/generate/combyne/filters/cpp_to_v8.js similarity index 100% rename from generate/filters/cpp_to_v8.js rename to generate/combyne/filters/cpp_to_v8.js diff --git a/generate/filters/default_value.js b/generate/combyne/filters/default_value.js similarity index 100% rename from generate/filters/default_value.js rename to generate/combyne/filters/default_value.js diff --git a/generate/filters/fields_info.js b/generate/combyne/filters/fields_info.js similarity index 100% rename from generate/filters/fields_info.js rename to generate/combyne/filters/fields_info.js diff --git a/generate/filters/has_return_type.js b/generate/combyne/filters/has_return_type.js similarity index 100% rename from generate/filters/has_return_type.js rename to generate/combyne/filters/has_return_type.js diff --git a/generate/filters/has_returns.js b/generate/combyne/filters/has_returns.js similarity index 100% rename from generate/filters/has_returns.js rename to generate/combyne/filters/has_returns.js diff --git a/generate/filters/is_double_pointer.js b/generate/combyne/filters/is_double_pointer.js similarity index 100% rename from generate/filters/is_double_pointer.js rename to generate/combyne/filters/is_double_pointer.js diff --git a/generate/combyne/filters/is_oid.js b/generate/combyne/filters/is_oid.js new file mode 100644 index 000000000..1d13f700f --- /dev/null +++ b/generate/combyne/filters/is_oid.js @@ -0,0 +1,3 @@ +module.exports = function(arg) { + return arg.cppClassName == "GitOid" && !arg.isSelf && !arg.isReturn; +}; diff --git a/generate/filters/is_pointer.js b/generate/combyne/filters/is_pointer.js similarity index 100% rename from generate/filters/is_pointer.js rename to generate/combyne/filters/is_pointer.js diff --git a/generate/filters/is_v8_value.js b/generate/combyne/filters/is_v8_value.js similarity index 100% rename from generate/filters/is_v8_value.js rename to generate/combyne/filters/is_v8_value.js diff --git 
a/generate/filters/js_args_count.js b/generate/combyne/filters/js_args_count.js similarity index 100% rename from generate/filters/js_args_count.js rename to generate/combyne/filters/js_args_count.js diff --git a/generate/filters/or.js b/generate/combyne/filters/or.js similarity index 100% rename from generate/filters/or.js rename to generate/combyne/filters/or.js diff --git a/generate/filters/payload_for.js b/generate/combyne/filters/payload_for.js similarity index 100% rename from generate/filters/payload_for.js rename to generate/combyne/filters/payload_for.js diff --git a/generate/filters/replace.js b/generate/combyne/filters/replace.js similarity index 100% rename from generate/filters/replace.js rename to generate/combyne/filters/replace.js diff --git a/generate/filters/returns_count.js b/generate/combyne/filters/returns_count.js similarity index 100% rename from generate/filters/returns_count.js rename to generate/combyne/filters/returns_count.js diff --git a/generate/filters/returns_info.js b/generate/combyne/filters/returns_info.js similarity index 100% rename from generate/filters/returns_info.js rename to generate/combyne/filters/returns_info.js diff --git a/generate/filters/title_case.js b/generate/combyne/filters/title_case.js similarity index 100% rename from generate/filters/title_case.js rename to generate/combyne/filters/title_case.js diff --git a/generate/filters/un_pointer.js b/generate/combyne/filters/un_pointer.js similarity index 100% rename from generate/filters/un_pointer.js rename to generate/combyne/filters/un_pointer.js diff --git a/generate/filters/upper.js b/generate/combyne/filters/upper.js similarity index 100% rename from generate/filters/upper.js rename to generate/combyne/filters/upper.js diff --git a/generate/manual/include/functions/copy.h b/generate/combyne/manual/include/functions/copy.h similarity index 100% rename from generate/manual/include/functions/copy.h rename to generate/combyne/manual/include/functions/copy.h diff 
--git a/generate/manual/include/macros.h b/generate/combyne/manual/include/macros.h similarity index 100% rename from generate/manual/include/macros.h rename to generate/combyne/manual/include/macros.h diff --git a/generate/manual/include/wrapper.h b/generate/combyne/manual/include/wrapper.h similarity index 100% rename from generate/manual/include/wrapper.h rename to generate/combyne/manual/include/wrapper.h diff --git a/generate/manual/src/functions/copy.cc b/generate/combyne/manual/src/functions/copy.cc similarity index 100% rename from generate/manual/src/functions/copy.cc rename to generate/combyne/manual/src/functions/copy.cc diff --git a/generate/manual/src/wrapper.cc b/generate/combyne/manual/src/wrapper.cc similarity index 100% rename from generate/manual/src/wrapper.cc rename to generate/combyne/manual/src/wrapper.cc diff --git a/generate/partials/async_function.cc b/generate/combyne/partials/async_function.cc similarity index 94% rename from generate/partials/async_function.cc rename to generate/combyne/partials/async_function.cc index cf6dd38b3..95f26fdf2 100644 --- a/generate/partials/async_function.cc +++ b/generate/combyne/partials/async_function.cc @@ -20,6 +20,9 @@ NAN_METHOD({{ cppClassName }}::{{ cppFunctionName }}) { {%partial convertFromV8 arg%} {%if not arg.isPayload %} baton->{{ arg.name }} = from_{{ arg.name }}; + {%if arg | isOid %} + baton->{{ arg.name }}NeedsFree = args[{{ arg.jsArg }}]->IsString(); + {%endif%} {%endif%} {%endif%} {%elsif arg.shouldAlloc %} @@ -129,6 +132,10 @@ void {{ cppClassName }}::{{ cppFunctionName }}Worker::HandleOKCallback() { {%else%} free((void *)baton->{{ arg.name }}); {%endif%} + {%elsif arg | isOid %} + if (baton->{{ arg.name}}NeedsFree) { + free((void *)baton->{{ arg.name }}); + } {%endif%} {%endeach%} diff --git a/generate/partials/convert_from_v8.cc b/generate/combyne/partials/convert_from_v8.cc similarity index 61% rename from generate/partials/convert_from_v8.cc rename to 
generate/combyne/partials/convert_from_v8.cc index 788f24e27..638f4f016 100644 --- a/generate/partials/convert_from_v8.cc +++ b/generate/combyne/partials/convert_from_v8.cc @@ -1,6 +1,7 @@ {%if not isPayload %} +// start convert_from_v8 block {{ cType }} from_{{ name }}; - {%if isOptional %} + {%if isOptional | or isBoolean %} if (args[{{ jsArg }}]->Is{{ cppClassName|cppToV8 }}()) { {%endif%} @@ -9,7 +10,7 @@ String::Utf8Value {{ name }}(args[{{ jsArg }}]->ToString()); from_{{ name }} = ({{ cType }}) strdup(*{{ name }}); {%elsif cppClassName == 'Wrapper'%} - + String::Utf8Value {{ name }}(args[{{ jsArg }}]->ToString()); from_{{ name }} = ({{ cType }}) strdup(*{{ name }}); {%elsif cppClassName == 'Array'%} @@ -33,6 +34,35 @@ {%else%} from_{{ name }} = ({{ cType }}) {{ cast }} {%if isEnum %}(int){%endif%} args[{{ jsArg }}]->To{{ cppClassName }}()->Value(); {%endif%} + {%elsif cppClassName == 'GitOid'%} + if (args[{{ jsArg }}]->IsString()) { + // Try and parse in a string to a git_oid + String::Utf8Value oidString(args[{{ jsArg }}]->ToString()); + git_oid *oidOut = (git_oid *)malloc(sizeof(git_oid)); + + if (git_oid_fromstr(oidOut, (const char *) strdup(*oidString)) != GIT_OK) { + free(oidOut); + + if (giterr_last()) { + return NanThrowError(giterr_last()->message); + } else { + return NanThrowError("Unknown Error"); + } + } + + {%if cType|isDoublePointer %} + from_{{ name }} = &oidOut; + {%else%} + from_{{ name }} = oidOut; + {%endif%} + } + else { + {%if cType|isDoublePointer %} + from_{{ name }} = ObjectWrap::Unwrap<{{ cppClassName }}>(args[{{ jsArg }}]->ToObject())->GetRefValue(); + {%else%} + from_{{ name }} = ObjectWrap::Unwrap<{{ cppClassName }}>(args[{{ jsArg }}]->ToObject())->GetValue(); + {%endif%} + } {%else%} {%if cType|isDoublePointer %} from_{{ name }} = ObjectWrap::Unwrap<{{ cppClassName }}>(args[{{ jsArg }}]->ToObject())->GetRefValue(); @@ -41,11 +71,17 @@ {%endif%} {%endif%} - {%if isOptional %} + {%if isBoolean %} + } + else { + from_{{ name }} = 
args[{{ jsArg }}]->IsTrue() ? 1 : 0; + } + {%elsif isOptional %} } else { from_{{ name }} = 0; } {%endif%} +// end convert_from_v8 block {%endif%} diff --git a/generate/combyne/partials/convert_to_v8.cc b/generate/combyne/partials/convert_to_v8.cc new file mode 100644 index 000000000..1d2053791 --- /dev/null +++ b/generate/combyne/partials/convert_to_v8.cc @@ -0,0 +1,65 @@ +// start convert_to_v8 block +{% if cppClassName == 'String' %} + if ({{= parsedName =}}){ + {% if size %} + to = NanNew({{= parsedName =}}, {{ size }}); + {% elsif cType == 'char **' %} + to = NanNew(*{{= parsedName =}}); + {% else %} + to = NanNew({{= parsedName =}}); + {% endif %} + } + else { + to = NanNull(); + } + + {% if freeFunctionName %} + {{ freeFunctionName }}({{= parsedName =}}); + {% endif %} + +{% elsif cppClassName|isV8Value %} + + {% if isCppClassIntType %} + to = NanNew<{{ cppClassName }}>(({{ parsedClassName }}){{= parsedName =}}); + {% else %} + to = NanNew<{{ cppClassName }}>({{= parsedName =}}); + {% endif %} + +{% elsif cppClassName == 'External' %} + + to = NanNew((void *){{= parsedName =}}); + +{% elsif cppClassName == 'Array' %} + + {%-- // FIXME this is not general purpose enough. 
--%} + {% if size %} + Local tmpArray = NanNew({{= parsedName =}}->{{ size }}); + for (unsigned int i = 0; i < {{= parsedName =}}->{{ size }}; i++) { + tmpArray->Set(NanNew(i), NanNew({{= parsedName =}}->{{ key }}[i])); + } + {% else %} + Local tmpArray = NanNew({{= parsedName =}}); + {% endif %} + + to = tmpArray; +{% else %} + {% if copy %} + if ({{= parsedName =}} != NULL) { + {{= parsedName =}} = ({{ cType|replace '**' '*' }} {% if not cType|isPointer %}*{% endif %}){{ copy }}({{= parsedName =}}); + } + {% endif %} + + if ({{= parsedName =}} != NULL) { + // {{= cppClassName }} {{= parsedName }} + {% if cppClassName == 'Wrapper' %} + to = {{ cppClassName }}::New((void *){{= parsedName =}}); + {% else %} + to = {{ cppClassName }}::New((void *){{= parsedName =}}, false); + {% endif %} + } + else { + to = NanNull(); + } + +{% endif %} +// end convert_to_v8 block diff --git a/generate/combyne/partials/doc.cc b/generate/combyne/partials/doc.cc new file mode 100644 index 000000000..53094312b --- /dev/null +++ b/generate/combyne/partials/doc.cc @@ -0,0 +1,15 @@ +/* +{% each args as arg %} + {% if not arg.isReturn %} + {% if not arg.isSelf %} + * @param {{ arg.jsClassName }} {{ arg.name }} + {% endif %} + {% endif %} +{% endeach %}{% each .|returnsInfo as returnInfo %} + {% if isAsync %} + * @param {{ returnInfo.jsOrCppClassName }} callback + {% else %} + * @return {{ returnInfo.jsOrCppClassName }} {% if returnInfo.name %}{{ returnInfo.name }}{% else %}result{% endif %} + {% endif %} +{% endeach %} + */ diff --git a/generate/combyne/partials/field_accessors.cc b/generate/combyne/partials/field_accessors.cc new file mode 100644 index 000000000..a980f4b22 --- /dev/null +++ b/generate/combyne/partials/field_accessors.cc @@ -0,0 +1,224 @@ +{% each fields|fieldsInfo as field %} + {% if not field.ignore %} + NAN_GETTER({{ cppClassName }}::Get{{ field.cppFunctionName }}) { + NanScope(); + + {{ cppClassName }} *wrapper = ObjectWrap::Unwrap<{{ cppClassName }}>(args.This()); + + 
{% if field.isEnum %} + NanReturnValue(NanNew((int)wrapper->GetValue()->{{ field.name }})); + + {% elsif field.isLibgitType | or field.payloadFor %} + NanReturnValue(wrapper->{{ field.name }}); + + {% elsif field.isCallbackFunction %} + NanReturnValue(wrapper->{{ field.name }}->GetFunction()); + + {% elsif field.cppClassName == 'String' %} + if (wrapper->GetValue()->{{ field.name }}) { + NanReturnValue(NanNew(wrapper->GetValue()->{{ field.name }})); + } + else { + NanReturnUndefined(); + } + + {% elsif field.cppClassName|isV8Value %} + NanReturnValue(NanNew<{{ field.cppClassName }}>(wrapper->GetValue()->{{ field.name }})); + {% endif %} + } + + NAN_SETTER({{ cppClassName }}::Set{{ field.cppFunctionName }}) { + NanScope(); + + {{ cppClassName }} *wrapper = ObjectWrap::Unwrap<{{ cppClassName }}>(args.This()); + + {% if field.isEnum %} + if (value->IsNumber()) { + wrapper->GetValue()->{{ field.name }} = ({{ field.cType }}) value->Int32Value(); + } + + {% elsif field.isLibgitType %} + NanDisposePersistent(wrapper->{{ field.name }}); + wrapper->raw->{{ field.name }} = {% if not field.cType | isPointer %}*{% endif %}ObjectWrap::Unwrap<{{ field.cppClassName }}>(value->ToObject())->GetValue(); + + {% elsif field.isCallbackFunction %} + if (value->IsFunction()) { + wrapper->{{ field.name }} = new NanCallback(value.As()); + } + + {% elsif field.payloadFor %} + NanAssignPersistent(wrapper->{{ field.name }}, value); + + {% elsif field.cppClassName == 'String' %} + if (wrapper->GetValue()->{{ field.name }}) { + } + + String::Utf8Value str(value); + wrapper->GetValue()->{{ field.name }} = strdup(*str); + + {% elsif field.isCppClassIntType %} + if (value->IsNumber()) { + wrapper->GetValue()->{{ field.name }} = value->{{field.cppClassName}}Value(); + } + + {% else %} + if (value->IsNumber()) { + wrapper->GetValue()->{{ field.name }} = ({{ field.cType }}) value->Int32Value(); + } + {% endif %} + } + + {% if field.isCallbackFunction %} + {{ field.returnType }} {{ cppClassName }}::{{ 
field.name }}_cppCallback ( + {% each field.args|argsInfo as arg %} + {{ arg.cType }} {{ arg.name}}{% if not arg.lastArg %},{% endif %} + {% endeach %} + ) { + {{ field.name|titleCase }}Baton* baton = new {{ field.name|titleCase }}Baton(); + + {% each field.args|argsInfo as arg %} + baton->{{ arg.name }} = {{ arg.name }}; + {% endeach %} + + baton->req.data = baton; + baton->done = false; + + uv_queue_work(uv_default_loop(), &baton->req, {{ field.name }}_asyncWork, {{ field.name }}_asyncAfter); + + while(!baton->done) { + this_thread::sleep_for(chrono::milliseconds(1)); + } + + {% each field|returnsInfo true false as _return %} + *{{ _return.name }} = *baton->{{ _return.name }}; + {% endeach %} + + return baton->result; + } + + void {{ cppClassName }}::{{ field.name }}_asyncWork(uv_work_t* req) { + // We aren't doing any work on a separate thread, just need to + // access the main node thread in the async after method. + // However, this worker method is still needed + } + + void {{ cppClassName }}::{{ field.name }}_asyncAfter(uv_work_t* req, int status) { + NanScope(); + + {{ field.name|titleCase }}Baton* baton = static_cast<{{ field.name|titleCase }}Baton*>(req->data); + {{ cppClassName }}* instance = static_cast<{{ cppClassName }}*>(baton->payload); + + if (instance->{{ field.name }}->IsEmpty()) { + {% if field.returnType == "int" %} + baton->result = {{ field.returnNoResults }}; // no results acquired + {% endif %} + + baton->done = true; + return; + } + + Local argv[{{ field.args|jsArgsCount }}] = { + {% each field.args|argsInfo as arg %} + {% if arg.name == "payload" %} + {%-- payload is always the last arg --%} + NanNew(instance->{{ fields|payloadFor field.name }}) + {% elsif arg.isJsArg %} + {% if arg.isEnum %} + NanNew((int)baton->{{ arg.name }}), + {% elsif arg.isLibgitType %} + NanNew({{ arg.cppClassName }}::New(&baton->{{ arg.name }}, false)), + {% elsif arg.cType == "size_t" %} + // HACK: NAN should really have an overload for NanNew to support size_t 
+ NanNew((unsigned int)baton->{{ arg.name }}), + {% else %} + NanNew(baton->{{ arg.name }}), + {% endif %} + {% endif %} + {% endeach %} + }; + + TryCatch tryCatch; + Handle result = instance->{{ field.name }}->Call({{ field.args|jsArgsCount }}, argv); + + if (result->IsObject() && result->ToObject()->Has(NanNew("then"))) { + Handle thenProp = result->ToObject()->Get(NanNew("then")); + + if (thenProp->IsFunction()) { + // we can be reasonbly certain that the result is a promise + Local promise = result->ToObject(); + + NanAssignPersistent(baton->promise, promise); + + uv_queue_work(uv_default_loop(), &baton->req, {{ field.name }}_asyncWork, {{ field.name }}_asyncPromisePolling); + return; + } + } + + {{ field.returnType }} resultStatus; + + {% each field|returnsInfo true false as _return %} + if (result.IsEmpty() || result->IsNativeError()) { + baton->result = {{ field.returnError }}; + } + else if (!result->IsNull() && !result->IsUndefined()) { + {{ _return.cppClassName }}* wrapper = ObjectWrap::Unwrap<{{ _return.cppClassName }}>(result->ToObject()); + wrapper->selfFreeing = false; + + baton->{{ _return.name }} = wrapper->GetRefValue(); + baton->result = {{ field.returnSuccess }}; + } + else { + baton->result = {{ field.returnNoResults }}; + } + {% endeach %} + baton->done = true; + } + + void {{ cppClassName }}::{{ field.name }}_asyncPromisePolling(uv_work_t* req, int status) { + NanScope(); + + {{ field.name|titleCase }}Baton* baton = static_cast<{{ field.name|titleCase }}Baton*>(req->data); + Local promise = NanNew(baton->promise); + NanCallback* isPendingFn = new NanCallback(promise->Get(NanNew("isPending")).As()); + Local argv[1]; // MSBUILD won't assign an array of length 0 + Local isPending = isPendingFn->Call(0, argv)->ToBoolean(); + + if (isPending->Value()) { + uv_queue_work(uv_default_loop(), &baton->req, {{ field.name }}_asyncWork, {{ field.name }}_asyncPromisePolling); + return; + } + + NanCallback* isFulfilledFn = new 
NanCallback(promise->Get(NanNew("isFulfilled")).As()); + Local isFulfilled = isFulfilledFn->Call(0, argv)->ToBoolean(); + + if (isFulfilled->Value()) { + NanCallback* resultFn = new NanCallback(promise->Get(NanNew("value")).As()); + Handle result = resultFn->Call(0, argv); + {{ field.returnType }} resultStatus; + + {% each field|returnsInfo true false as _return %} + if (result.IsEmpty() || result->IsNativeError()) { + baton->result = {{ field.returnError }}; + } + else if (!result->IsNull() && !result->IsUndefined()) { + {{ _return.cppClassName }}* wrapper = ObjectWrap::Unwrap<{{ _return.cppClassName }}>(result->ToObject()); + wrapper->selfFreeing = false; + + baton->{{ _return.name }} = wrapper->GetRefValue(); + baton->result = {{ field.returnSuccess }}; + } + else { + baton->result = {{ field.returnNoResults }}; + } + {% endeach %} + baton->done = true; + } + else { + // promise was rejected + baton->result = {{ field.returnError }}; + baton->done = false; + } + } + {% endif %} + {% endif %} +{% endeach %} diff --git a/generate/combyne/partials/fields.cc b/generate/combyne/partials/fields.cc new file mode 100644 index 000000000..82cc8d084 --- /dev/null +++ b/generate/combyne/partials/fields.cc @@ -0,0 +1,14 @@ +{% each fields|fieldsInfo as field %} + {% if not field.ignore %} + NAN_METHOD({{ cppClassName }}::{{ field.cppFunctionName }}) { + NanScope(); + Handle to; + + {{ field.cType }} {% if not field.cppClassName|isV8Value %}*{% endif %}{{ field.name }} = + {% if not field.cppClassName|isV8Value %}&{% endif %}ObjectWrap::Unwrap<{{ cppClassName }}>(args.This())->GetValue()->{{ field.name }}; + + {% partial convertToV8 field %} + NanReturnValue(to); + } + {% endif %} +{% endeach %} diff --git a/generate/partials/guard_arguments.cc b/generate/combyne/partials/guard_arguments.cc similarity index 53% rename from generate/partials/guard_arguments.cc rename to generate/combyne/partials/guard_arguments.cc index 9bc969979..5179ce908 100644 --- 
a/generate/partials/guard_arguments.cc +++ b/generate/combyne/partials/guard_arguments.cc @@ -2,10 +2,18 @@ {%each args|argsInfo as arg%} {%if arg.isJsArg%} {%if not arg.isOptional%} + {%if arg | isOid %} + if (args.Length() == {{arg.jsArg}} + || (!args[{{arg.jsArg}}]->IsObject() && !args[{{arg.jsArg}}]->IsString())) { + return NanThrowError("{{arg.jsClassName}} {{arg.name}} is required."); + } + + {%else%} if (args.Length() == {{arg.jsArg}} || !args[{{arg.jsArg}}]->Is{{arg.cppClassName|cppToV8}}()) { return NanThrowError("{{arg.jsClassName}} {{arg.name}} is required."); } + {%endif%} {%endif%} {%endif%} {%endeach%} diff --git a/generate/partials/sync_function.cc b/generate/combyne/partials/sync_function.cc similarity index 88% rename from generate/partials/sync_function.cc rename to generate/combyne/partials/sync_function.cc index 042a7d3a3..243c1654e 100644 --- a/generate/partials/sync_function.cc +++ b/generate/combyne/partials/sync_function.cc @@ -41,6 +41,10 @@ from_{{ arg.name }} {%each args|argsInfo as arg %} {%if arg.shouldAlloc %} free({{ arg.name }}); + {%elsif arg | isOid %} + if (args[{{ arg.jsArg }}]->IsString()) { + free({{ arg.name }}); + } {%endif%} {%endeach%} @@ -52,6 +56,14 @@ from_{{ arg.name }} } {%endif%} +{%each args|argsInfo as arg %} + {%if arg | isOid %} + if (args[{{ arg.jsArg }}]->IsString()) { + free(&from_{{ arg.name }}); + } + {%endif%} +{%endeach%} + {%if not .|returnsCount %} NanReturnUndefined(); {%else%} diff --git a/generate/templates/binding.gyp b/generate/combyne/templates/binding.gyp similarity index 88% rename from generate/templates/binding.gyp rename to generate/combyne/templates/binding.gyp index 10f292de3..6e287b408 100644 --- a/generate/templates/binding.gyp +++ b/generate/combyne/templates/binding.gyp @@ -1,4 +1,5 @@ -# This is a generated file, modify: generate/templates/binding.gyp. +# // This is a generated file, modify: generate/templates/binding.gyp. 
+ { "targets": [ { @@ -12,11 +13,11 @@ "src/nodegit.cc", "src/wrapper.cc", "src/functions/copy.cc", - {%each%} + {% each %} {% if type != "enum" %} - "src/{{ name }}.cc", + "src/{{ name }}.cc", {% endif %} - {%endeach%} + {% endeach %} ], "include_dirs": [ diff --git a/generate/combyne/templates/class_content.cc b/generate/combyne/templates/class_content.cc new file mode 100644 index 000000000..538b10940 --- /dev/null +++ b/generate/combyne/templates/class_content.cc @@ -0,0 +1,133 @@ +// This is a generated file, modify: generate/templates/class_content.cc. +#include +#include + +extern "C" { + #include + {% each cDependencies as dependency %} + #include <{{ dependency }}> + {% endeach %} +} + + +#include "../include/functions/copy.h" +#include "../include/macros.h" +#include "../include/{{ filename }}.h" + +{% each dependencies as dependency %} + #include "{{ dependency }}" +{% endeach %} + +#include + +using namespace std; +using namespace v8; +using namespace node; + +{% if cType %} + {{ cppClassName }}::{{ cppClassName }}({{ cType }} *raw, bool selfFreeing) { + this->raw = raw; + this->selfFreeing = selfFreeing; + } + + {{ cppClassName }}::~{{ cppClassName }}() { + {% if freeFunctionName %} + if (this->selfFreeing) { + {{ freeFunctionName }}(this->raw); + } + {% endif %} + } + + void {{ cppClassName }}::InitializeComponent(Handle target) { + NanScope(); + + Local tpl = NanNew(New); + + tpl->InstanceTemplate()->SetInternalFieldCount(1); + tpl->SetClassName(NanNew("{{ jsClassName }}")); + + {% each functions as function %} + {% if not function.ignore %} + {% if function.isPrototypeMethod %} + NODE_SET_PROTOTYPE_METHOD(tpl, "{{ function.jsFunctionName }}", {{ function.cppFunctionName }}); + {% else %} + NODE_SET_METHOD(tpl, "{{ function.jsFunctionName }}", {{ function.cppFunctionName }}); + {% endif %} + {% endif %} + {% endeach %} + + {% each fields as field %} + {% if not field.ignore %} + NODE_SET_PROTOTYPE_METHOD(tpl, "{{ field.jsFunctionName }}", {{ 
field.cppFunctionName }}); + {% endif %} + {% endeach %} + + Local _constructor_template = tpl->GetFunction(); + NanAssignPersistent(constructor_template, _constructor_template); + target->Set(NanNew("{{ jsClassName }}"), _constructor_template); + } + + NAN_METHOD({{ cppClassName }}::New) { + NanScope(); + + if (args.Length() == 0 || !args[0]->IsExternal()) { + {% if createFunctionName %} + return NanThrowError("A new {{ cppClassName }} cannot be instantiated. Use {{ jsCreateFunctionName }} instead."); + {% else %} + return NanThrowError("A new {{ cppClassName }} cannot be instantiated."); + {% endif %} + } + + {{ cppClassName }}* object = new {{ cppClassName }}(static_cast<{{ cType }} *>(Handle::Cast(args[0])->Value()), args[1]->BooleanValue()); + object->Wrap(args.This()); + + NanReturnValue(args.This()); + } + + Handle {{ cppClassName }}::New(void *raw, bool selfFreeing) { + NanEscapableScope(); + Handle argv[2] = { NanNew((void *)raw), NanNew(selfFreeing) }; + return NanEscapeScope(NanNew({{ cppClassName }}::constructor_template)->NewInstance(2, argv)); + } + + {{ cType }} *{{ cppClassName }}::GetValue() { + return this->raw; + } + + {{ cType }} **{{ cppClassName }}::GetRefValue() { + return &this->raw; + } + +{% else %} + + void {{ cppClassName }}::InitializeComponent(Handle target) { + NanScope(); + + Local object = NanNew(); + + {% each functions as function %} + {% if not function.ignore %} + NODE_SET_METHOD(object, "{{ function.jsFunctionName }}", {{ function.cppFunctionName }}); + {% endif %} + {% endeach %} + + target->Set(NanNew("{{ jsClassName }}"), object); + } + +{% endif %} + +{% each functions as function %} + {% if not function.ignore %} + {% if function.isAsync %} + {% partial asyncFunction function %} + {% else %} + {% partial syncFunction function %} + {% endif %} + {% endif %} +{% endeach %} + +{% partial fields . 
%} + +{% if not cTypeIsUndefined %} + Persistent {{ cppClassName }}::constructor_template; +{% endif %} diff --git a/generate/templates/class_header.h b/generate/combyne/templates/class_header.h similarity index 96% rename from generate/templates/class_header.h rename to generate/combyne/templates/class_header.h index ab0615362..d438e0317 100644 --- a/generate/templates/class_header.h +++ b/generate/combyne/templates/class_header.h @@ -69,6 +69,9 @@ class {{ cppClassName }} : public ObjectWrap { {{ arg.cType|replace "**" "*" }} {{ arg.name }}; {%else%} {{ arg.cType }} {{ arg.name }}; + {%if arg | isOid %} + bool {{ arg.name }}NeedsFree; + {%endif%} {%endif%} {%endeach%} }; diff --git a/generate/combyne/templates/enums.js b/generate/combyne/templates/enums.js new file mode 100644 index 000000000..f9823800a --- /dev/null +++ b/generate/combyne/templates/enums.js @@ -0,0 +1,13 @@ +var NodeGit = require("../"); +NodeGit.Enums = {}; + +{% each . as enumerable %} + {% if enumerable.type == "enum" %} + NodeGit.{{ enumerable.owner }}.{{ enumerable.JsName }} = { + {% each enumerable.values as value %} + {{ value.JsName }}: {{ value.value }}, + {% endeach %} + }; + + {% endif %} +{% endeach %} diff --git a/generate/templates/nodegit.cc b/generate/combyne/templates/nodegit.cc similarity index 77% rename from generate/templates/nodegit.cc rename to generate/combyne/templates/nodegit.cc index 5431d753b..9bd2dd89b 100644 --- a/generate/templates/nodegit.cc +++ b/generate/combyne/templates/nodegit.cc @@ -7,7 +7,7 @@ #include "../include/functions/copy.h" {% each %} {% if type != "enum" %} -#include "../include/{{ filename }}.h" + #include "../include/{{ filename }}.h" {% endif %} {% endeach %} @@ -15,11 +15,11 @@ extern "C" void init(Handle target) { NanScope(); Wrapper::InitializeComponent(target); - {%each%} + {% each %} {% if type != "enum" %} - {{ cppClassName }}::InitializeComponent(target); + {{ cppClassName }}::InitializeComponent(target); {% endif %} - {%endeach%} + {% 
endeach %} } NODE_MODULE(nodegit, init) diff --git a/lib/nodegit.js b/generate/combyne/templates/nodegit.js similarity index 54% rename from lib/nodegit.js rename to generate/combyne/templates/nodegit.js index 43760522c..5ea95e89e 100644 --- a/lib/nodegit.js +++ b/generate/combyne/templates/nodegit.js @@ -1,6 +1,5 @@ var Promise = require("nodegit-promise"); var promisify = require("promisify-node"); -var descriptors = require("../generate/idefs.json"); var rawApi; // Attempt to load the production release first, if it fails fall back to the @@ -13,31 +12,40 @@ catch (e) { rawApi = require("../build/Debug/nodegit"); } -// Native methods do not return an identifiable function, so this function will -// filter down the function identity to match the libgit2 descriptor. -descriptors.forEach(function(descriptor) { - if (descriptor.type == "enum") { - return; - } - var Ctor = rawApi[descriptor.jsClassName]; - - // Iterate over each function in the file. - descriptor.functions.filter(function(func) { - return func.isAsync; - }).forEach(function(asyncFunc) { - var original = null; - - // Special case when you have a prototype method. - if (asyncFunc.isPrototypeMethod && Ctor.prototype) { - original = Ctor.prototype[asyncFunc.jsFunctionName]; - Ctor.prototype[asyncFunc.jsFunctionName] = promisify(original); - } - else { - original = Ctor[asyncFunc.jsFunctionName]; - Ctor[asyncFunc.jsFunctionName] = promisify(original); - } - }); -}); +// Native methods do not return an identifiable function, so we +// have to override them here +/* jshint ignore:start */ +{% each . 
as idef %} + {% if idef.type != "enum" %} + + var _{{ idef.jsClassName }} + = rawApi.{{idef.jsClassName}}; + + {% each idef.functions as fn %} + {% if fn.isAsync %} + + {% if fn.isPrototypeMethod %} + + var _{{ idef.jsClassName }}_{{ fn.jsFunctionName}} + = _{{ idef.jsClassName }}.prototype.{{ fn.jsFunctionName }}; + _{{ idef.jsClassName }}.prototype.{{ fn.jsFunctionName }} + = promisify(_{{ idef.jsClassName }}_{{ fn.jsFunctionName}}); + + {% else %} + + var _{{ idef.jsClassName }}_{{ fn.jsFunctionName}} + = _{{ idef.jsClassName }}.{{ fn.jsFunctionName }}; + _{{ idef.jsClassName }}.{{ fn.jsFunctionName }} + = promisify(_{{ idef.jsClassName }}_{{ fn.jsFunctionName}}); + + {% endif %} + + {% endif %} + {% endeach %} + + {% endif %} +{% endeach %} +/* jshint ignore:end */ // Set the exports prototype to the raw API. exports.__proto__ = rawApi; diff --git a/generate/templates/struct_content.cc b/generate/combyne/templates/struct_content.cc similarity index 61% rename from generate/templates/struct_content.cc rename to generate/combyne/templates/struct_content.cc index fca6cad30..86d850d40 100644 --- a/generate/templates/struct_content.cc +++ b/generate/combyne/templates/struct_content.cc @@ -5,23 +5,25 @@ #include extern "C" { -#include -{%each cDependencies as dependency %} -#include <{{ dependency }}> -{%endeach%} + #include + {% each cDependencies as dependency %} + #include <{{ dependency }}> + {% endeach %} } +#include #include "../include/functions/copy.h" #include "../include/{{ filename }}.h" -{%each dependencies as dependency%} -#include "{{ dependency }}" -{%endeach%} -#include +{% each dependencies as dependency %} + #include "{{ dependency }}" +{% endeach %} using namespace v8; using namespace node; using namespace std; + + // generated from struct_content.cc {{ cppClassName }}::{{ cppClassName }}() { {{ cType }} wrappedValue = {{ cType|upper }}_INIT; @@ -47,32 +49,31 @@ using namespace std; } void {{ cppClassName }}::ConstructFields() { - {%each 
fields|fieldsInfo as field %} - {%if not field.ignore %} - {%if not field.isEnum %} - {%if field.hasConstructor %} - Local {{ field.name }}Temp = {{ field.cppClassName }}::New(&this->raw->{{ field.name }}, false)->ToObject(); - NanAssignPersistent(this->{{ field.name }}, {{ field.name }}Temp); - - {%elsif field.isLibgitType %} - Local {{ field.name }}Temp = {{ field.cppClassName }}::New(&this->raw->{{ field.name }}, false)->ToObject(); - NanAssignPersistent(this->{{ field.name }}, {{ field.name }}Temp); - - {%elsif field.isCallbackFunction %} - - // Set the static method call and set the payload for this function to be - // the current instance - this->raw->{{ field.name }} = ({{ field.cType }}){{ field.name }}_cppCallback; - this->raw->{{ fields|payloadFor field.name }} = (void *)this; - this->{{ field.name }} = new NanCallback(); - {%elsif field.payloadFor %} - - Local {{ field.name }} = NanUndefined(); - NanAssignPersistent(this->{{ field.name }}, {{ field.name }}); - {%endif%} - {%endif%} - {%endif%} - {%endeach%} + {% each fields|fieldsInfo as field %} + {% if not field.ignore %} + {% if not field.isEnum %} + {% if field.hasConstructor |or field.isLibgitType %} + Local {{ field.name }}Temp = {{ field.cppClassName }}::New( + &this->raw->{{ field.name }}, + false + )->ToObject(); + NanAssignPersistent(this->{{ field.name }}, {{ field.name }}Temp); + + {% elsif field.isCallbackFunction %} + + // Set the static method call and set the payload for this function to be + // the current instance + this->raw->{{ field.name }} = ({{ field.cType }}){{ field.name }}_cppCallback; + this->raw->{{ fields|payloadFor field.name }} = (void *)this; + this->{{ field.name }} = new NanCallback(); + {% elsif field.payloadFor %} + + Local {{ field.name }} = NanUndefined(); + NanAssignPersistent(this->{{ field.name }}, {{ field.name }}); + {% endif %} + {% endif %} + {% endif %} + {% endeach %} } void {{ cppClassName }}::InitializeComponent(Handle target) { @@ -83,11 +84,11 @@ void {{ 
cppClassName }}::InitializeComponent(Handle target) { tpl->InstanceTemplate()->SetInternalFieldCount(1); tpl->SetClassName(NanNew("{{ jsClassName }}")); - {%each fields as field%} - {%if not field.ignore%} - tpl->InstanceTemplate()->SetAccessor(NanNew("{{ field.jsFunctionName }}"), Get{{ field.cppFunctionName}}, Set{{ field.cppFunctionName}}); - {%endif%} - {%endeach%} + {% each fields as field %} + {% if not field.ignore %} + tpl->InstanceTemplate()->SetAccessor(NanNew("{{ field.jsFunctionName }}"), Get{{ field.cppFunctionName}}, Set{{ field.cppFunctionName}}); + {% endif %} + {% endeach %} Local _constructor_template = tpl->GetFunction(); NanAssignPersistent(constructor_template, _constructor_template); @@ -125,6 +126,6 @@ Handle {{ cppClassName }}::New(void* raw, bool selfFreeing) { return &this->raw; } -{%partial fieldAccessors .%} +{% partial fieldAccessors . %} Persistent {{ cppClassName }}::constructor_template; diff --git a/generate/combyne/templates/struct_header.h b/generate/combyne/templates/struct_header.h new file mode 100644 index 000000000..be27df33a --- /dev/null +++ b/generate/combyne/templates/struct_header.h @@ -0,0 +1,92 @@ +#ifndef {{ cppClassName|upper }}_H +#define {{ cppClassName|upper }}_H +// generated from struct_header.h +#include +#include + +extern "C" { + #include + {% each cDependencies as dependency %} + #include <{{ dependency }}> + {% endeach %} +} + +{% each dependencies as dependency %} + #include "{{ dependency }}" +{% endeach %} + +using namespace node; +using namespace v8; + +class {{ cppClassName }} : public ObjectWrap { + public: + {{ cppClassName }}({{ cType }}* raw, bool selfFreeing); + static Persistent constructor_template; + static void InitializeComponent (Handle target); + + {{ cType }} *GetValue(); + {{ cType }} **GetRefValue(); + + static Handle New(void *raw, bool selfFreeing); + + bool selfFreeing; + + {% each fields as field %} + {% if not field.ignore %} + {% if field.isCallbackFunction %} + static {{ 
field.returnType }} {{ field.name }}_cppCallback ( + {% each field.args|argsInfo as arg %} + {{ arg.cType }} {{ arg.name}} + {% if not arg.lastArg %} + , + {% endif %} + {% endeach %} + ); + + static void {{ field.name }}_asyncWork(uv_work_t* req); + static void {{ field.name }}_asyncAfter(uv_work_t* req, int status); + static void {{ field.name }}_asyncPromisePolling(uv_work_t* req, int status); + struct {{ field.name|titleCase }}Baton { + {% each field.args|argsInfo as arg %} + {{ arg.cType }} {{ arg.name}}; + {% endeach %} + + uv_work_t req; + {{ field.returnType }} result; + Persistent promise; + bool done; + }; + {% endif %} + {% endif %} + {% endeach %} + + private: + {{ cppClassName }}(); + ~{{ cppClassName }}(); + + void ConstructFields(); + + static NAN_METHOD(New); + + {% each fields as field %} + {% if not field.ignore %} + {% if not field.isEnum %} + {% if field.isLibgitType %} + Persistent {{ field.name }}; + {% elsif field.isCallbackFunction %} + NanCallback* {{ field.name }}; + {% elsif field.payloadFor %} + Persistent {{ field.name }}; + {% endif %} + {% endif %} + + static NAN_GETTER(Get{{ field.cppFunctionName }}); + static NAN_SETTER(Set{{ field.cppFunctionName }}); + + {% endif %} + {% endeach %} + + {{ cType }} *raw; +}; + +#endif diff --git a/generate/index.js b/generate/index.js index 23e36792e..9eea74ecf 100644 --- a/generate/index.js +++ b/generate/index.js @@ -1,110 +1,3 @@ -const path = require("path"); -const combyne = require("combyne"); -const file = require("./util/file"); -const idefs = require("./idefs"); -const promisify = require("promisify-node"); -const fse = promisify(require("fs-extra")); - -// Customize the delimiters so as to not process `{{{` or `}}}`. 
-combyne.settings.delimiters = { - START_RAW: "{{=", - END_RAW: "=}}" -}; - -var partials = { - asyncFunction: file.read("partials/async_function.cc"), - convertFromV8: file.read("partials/convert_from_v8.cc"), - convertToV8: file.read("partials/convert_to_v8.cc"), - doc: file.read("partials/doc.cc"), - fields: file.read("partials/fields.cc"), - guardArguments: file.read("partials/guard_arguments.cc"), - syncFunction: file.read("partials/sync_function.cc"), - fieldAccessors: file.read("partials/field_accessors.cc") -}; - -var templates = { - class_content: file.read("templates/class_content.cc"), - struct_content: file.read("templates/struct_content.cc"), - class_header: file.read("templates/class_header.h"), - struct_header: file.read("templates/struct_header.h"), - binding: file.read("templates/binding.gyp"), - nodegit: file.read("templates/nodegit.cc"), - enums: file.read("templates/enums.js") -}; - -var filters = { - upper: require("./filters/upper"), - replace: require("./filters/replace"), - titleCase: require("./filters/title_case"), - or: require("./filters/or"), - and: require("./filters/and"), - defaultValue: require("./filters/default_value"), - argsInfo: require("./filters/args_info"), - cppToV8: require("./filters/cpp_to_v8"), - jsArgsCount: require("./filters/js_args_count"), - isV8Value: require("./filters/is_v8_value"), - isPointer: require("./filters/is_pointer"), - isDoublePointer: require("./filters/is_double_pointer"), - unPointer: require("./filters/un_pointer"), - payloadFor: require("./filters/payload_for"), - hasReturnType: require("./filters/has_return_type"), - hasReturns: require("./filters/has_returns"), - returnsCount: require("./filters/returns_count"), - returnsInfo: require("./filters/returns_info"), - fieldsInfo: require("./filters/fields_info") -}; - -// Convert Buffers to Combyne templates. 
-Object.keys(templates).forEach(function(template) { - templates[template] = combyne(templates[template]); - - // Attach all filters to all templates. - Object.keys(filters).forEach(function(filter) { - templates[template].registerFilter(filter, filters[filter]); - }); -}); - -// Attach all partials to select templates. -Object.keys(partials).forEach(function(partial) { - templates.class_content.registerPartial(partial, combyne(partials[partial])); - templates.struct_content.registerPartial(partial, combyne(partials[partial])); -}); - - -// Determine which definitions to actually include in the source code. -// This might not be needed anymore but to be frank I'm not totally positive -var enabled = idefs.filter(function(idef) { - return !idef.ignore; -}); - -fse.remove(path.resolve(__dirname, "../src")).then(function() { - return fse.remove(path.resolve(__dirname, "../include")); -}).then(function() { - return fse.copy(path.resolve(__dirname, "./manual/"), path.resolve(__dirname, "../")); -}).then(function() { - // Write out single purpose templates. - file.write("../binding.gyp", templates.binding.render(enabled)); - file.write("../src/nodegit.cc", templates.nodegit.render(enabled)); - - - // Write out all the classes. 
- enabled.forEach(function(idef) { - try { - if (idef.type == "struct") { - file.write("../src/" + idef.filename + ".cc", templates.struct_content.render(idef)); - file.write("../include/" + idef.filename + ".h", templates.struct_header.render(idef)); - } - else if (idef.type == "class") { - file.write("../src/" + idef.filename + ".cc", templates.class_content.render(idef)); - file.write("../include/" + idef.filename + ".h", templates.class_header.render(idef)); - } - } - catch (e) { - if (process.env.BUILD_ONLY) { - console.log(e); - } - } - }); - - file.write("../lib/enums.js", templates.enums.render(enabled)); -}); +require("./scripts/generateJson"); +require("./scripts/generateNativeCode"); +require("./scripts/generateMissingTests"); diff --git a/generate/callbacks.json b/generate/input/callbacks.json similarity index 100% rename from generate/callbacks.json rename to generate/input/callbacks.json diff --git a/generate/descriptor.json b/generate/input/descriptor.json similarity index 94% rename from generate/descriptor.json rename to generate/input/descriptor.json index dd767bf55..cca202762 100644 --- a/generate/descriptor.json +++ b/generate/input/descriptor.json @@ -1068,16 +1068,52 @@ "git_remote_default_branch": { "ignore": true }, + "git_remote_fetch": { + "args": { + "reflog_message": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, "git_remote_get_fetch_refspecs": { - "ignore": true + "args": { + "array": { + "isReturn": true, + "shouldAlloc": true, + "cppClassName": "Array", + "jsClassName": "Array", + "size": "count", + "key": "strings" + } + }, + "isAsync": true }, "git_remote_get_push_refspecs": { - "ignore": true + "args": { + "array": { + "isReturn": true, + "shouldAlloc": true, + "cppClassName": "Array", + "jsClassName": "Array", + "size": "count", + "key": "strings" + } + }, + "isAsync": true }, "git_remote_list": { "args": { "out": { - "shouldAlloc": true + "isReturn": true, + "shouldAlloc": true, + 
"cppClassName": "Array", + "jsClassName": "Array", + "size": "count", + "key": "strings" } } }, @@ -1086,6 +1122,12 @@ }, "git_remote_rename": { "ignore": true + }, + "git_remote_set_fetch_refspecs": { + "ignore": true + }, + "git_remote_set_push_refspecs": { + "ignore": true } } }, @@ -1156,9 +1198,15 @@ }, "signature": { "functions": { + "git_signature_default": { + "isAsync": false + }, "git_signature_new": { "isAsync": false - } + }, + "git_signature_now": { + "isAsync": false + } } }, "smart": { @@ -1221,6 +1269,19 @@ "git_tag_foreach": { "ignore": true }, + "git_tag_list": { + "args": { + "tag_names": { + "isReturn": true, + "shouldAlloc": true, + "cppClassName": "Array", + "jsClassName": "Array", + "size": "count", + "key": "strings" + } + }, + "isAsync": true + }, "git_tag_target": { "args": { "target_out": { diff --git a/generate/missing-tests-ignore.json b/generate/input/ignored-missing-tests.json similarity index 100% rename from generate/missing-tests-ignore.json rename to generate/input/ignored-missing-tests.json diff --git a/generate/libgit2-supplement.json b/generate/input/libgit2-supplement.json similarity index 100% rename from generate/libgit2-supplement.json rename to generate/input/libgit2-supplement.json diff --git a/generate/v0.21.2.json b/generate/input/v0.21.2.json similarity index 100% rename from generate/v0.21.2.json rename to generate/input/v0.21.2.json diff --git a/generate/missing-tests.js b/generate/missing-tests.js deleted file mode 100644 index 339f1a217..000000000 --- a/generate/missing-tests.js +++ /dev/null @@ -1,64 +0,0 @@ -const path = require("path"); -const idefs = require("./idefs"); -const Promise = require("nodegit-promise"); -const promisify = require("promisify-node"); -const fse = promisify(require("fs-extra")); -const testFilesPath = path.resolve(__dirname, "../test/tests"); -const missingFileIgnores = require("./missing-tests-ignore"); - -var output = {}; - -function findMissingTest(idef) { - var testFilePath = 
path.join(testFilesPath, idef.filename + ".js"); - var result = {}; - - return fse.readFile(testFilePath, "utf8") - .then(function(file) { - var fieldsResult = []; - var functionsResult = []; - var fieldIgnores = (missingFileIgnores[idef.filename] || {}).fields; - var functionIgnores = (missingFileIgnores[idef.filename] || {}).functions; - - fieldIgnores = fieldIgnores || []; - functionIgnores = functionIgnores || []; - file = file || ""; - - idef.fields.forEach(function(field) { - if (file.indexOf(field.jsFunctionName) < 0 - && fieldIgnores.indexOf(field.jsFunctionName < 0)) { - fieldsResult.push(field.jsFunctionName); - } - }); - - result.fields = fieldsResult; - - idef.functions.forEach(function(fn) { - if (file.indexOf(fn.jsFunctionName) < 0 - && functionIgnores.indexOf(fn.jsFunctionName) < 0) { - functionsResult.push(fn.jsFunctionName); - } - }); - - result.functions = functionsResult; - }, - function() { - result.testFileMissing = false; - result.testFilePath = testFilePath; - }).then(function() { - output[idef.filename] = result; - }); -}; - -var promises = []; - -idefs.forEach(function(idef) { - promises.push(findMissingTest(idef)); -}); - -Promise.all(promises) -.then(function() { - fse.writeFileSync(path.join(__dirname, "missing-tests.json"), - JSON.stringify(output, null, 2)); -}, function(fail) { - console.log(fail); -}); diff --git a/generate/partials/convert_to_v8.cc b/generate/partials/convert_to_v8.cc deleted file mode 100644 index fd2802a0d..000000000 --- a/generate/partials/convert_to_v8.cc +++ /dev/null @@ -1,60 +0,0 @@ -// start convert_to_v8 block -{%if cppClassName == 'String' %} - if ({{= parsedName =}}){ - {%if size %} - to = NanNew({{= parsedName =}}, {{ size }}); - {%elsif cType == 'char **' %} - - to = NanNew(*{{= parsedName =}}); - {%else%} - to = NanNew({{= parsedName =}}); - {%endif%} - } - else { - to = NanNull(); - } - - {%if freeFunctionName %} -{{ freeFunctionName }}({{= parsedName =}}); - {%endif%} -{%elsif cppClassName|isV8Value 
%} - {%if isCppClassIntType %} -to = NanNew<{{ cppClassName }}>(({{ parsedClassName }}){{= parsedName =}}); - {%else%} - to = NanNew<{{ cppClassName }}>({{= parsedName =}}); - {%endif%} -{%elsif cppClassName == 'External' %} -to = NanNew((void *){{= parsedName =}}); -{%elsif cppClassName == 'Array' %} -{%-- - // FIXME this is not general purpose enough. ---%} -{%if size%} -Local tmpArray = NanNew({{= parsedName =}}->{{ size }}); -for (unsigned int i = 0; i < {{= parsedName =}}->{{ size }}; i++) { - tmpArray->Set(NanNew(i), NanNew({{= parsedName =}}->{{ key }}[i])); -} -{%else%} -Local tmpArray = NanNew({{= parsedName =}}); -{%endif%} -to = tmpArray; -{%else%} - {%if copy %} - if ({{= parsedName =}} != NULL) { - {{= parsedName =}} = ({{ cType|replace '**' '*' }} {%if not cType|isPointer %}*{%endif%}){{ copy }}({{= parsedName =}}); - } - {%endif%} - - if ({{= parsedName =}} != NULL) { - // {{= cppClassName }} {{= parsedName }} - {%if cppClassName == 'Wrapper' %} - to = {{ cppClassName }}::New((void *){{= parsedName =}}); - {%else%} - to = {{ cppClassName }}::New((void *){{= parsedName =}}, false); - {%endif%} - } else { - to = NanNull(); - } - -{%endif%} -// end convert_to_v8 block diff --git a/generate/partials/doc.cc b/generate/partials/doc.cc deleted file mode 100644 index 158a48113..000000000 --- a/generate/partials/doc.cc +++ /dev/null @@ -1,15 +0,0 @@ -/** -{%each args as arg %} - {%if not arg.isReturn %} - {%if not arg.isSelf %} - * @param {{ arg.jsClassName }} {{ arg.name }} - {%endif%} - {%endif%} -{%endeach%}{%each .|returnsInfo as returnInfo %} - {%if isAsync %} - * @param {{ returnInfo.jsOrCppClassName }} callback - {%else%} - * @return {{ returnInfo.jsOrCppClassName }} {%if returnInfo.name %}{{ returnInfo.name }}{%else%}result{%endif%} - {%endif%} -{%endeach%} - */ diff --git a/generate/partials/field_accessors.cc b/generate/partials/field_accessors.cc deleted file mode 100644 index a5b629026..000000000 --- a/generate/partials/field_accessors.cc +++ 
/dev/null @@ -1,217 +0,0 @@ -{%each fields|fieldsInfo as field %} - {%if not field.ignore %} - -NAN_GETTER({{ cppClassName }}::Get{{ field.cppFunctionName }}) { - NanScope(); - - {{ cppClassName }} *wrapper = ObjectWrap::Unwrap<{{ cppClassName }}>(args.This()); - - {%if field.isEnum %} - NanReturnValue(NanNew((int)wrapper->GetValue()->{{ field.name }})); - {%elsif field.isLibgitType | or field.payloadFor %} - NanReturnValue(wrapper->{{ field.name }}); - {%elsif field.isCallbackFunction %} - NanReturnValue(wrapper->{{ field.name }}->GetFunction()); - {%elsif field.cppClassName == 'String' %} - if (wrapper->GetValue()->{{ field.name }}) { - NanReturnValue(NanNew(wrapper->GetValue()->{{ field.name }})); - } - else { - NanReturnUndefined(); - } - {%elsif field.cppClassName|isV8Value %} - NanReturnValue(NanNew<{{ field.cppClassName }}>(wrapper->GetValue()->{{ field.name }})); - {%endif%} -} - -NAN_SETTER({{ cppClassName }}::Set{{ field.cppFunctionName }}) { - NanScope(); - - {{ cppClassName }} *wrapper = ObjectWrap::Unwrap<{{ cppClassName }}>(args.This()); - - {%if field.isEnum %} - if (value->IsNumber()) { - wrapper->GetValue()->{{ field.name }} = ({{ field.cType }}) value->Int32Value(); - } - - {%elsif field.isLibgitType %} - NanDisposePersistent(wrapper->{{ field.name }}); - - wrapper->raw->{{ field.name }} = {%if not field.cType | isPointer %}*{%endif%}ObjectWrap::Unwrap<{{ field.cppClassName }}>(value->ToObject())->GetValue(); - {%elsif field.isCallbackFunction %} - if (value->IsFunction()) { - wrapper->{{ field.name }} = new NanCallback(value.As()); - } - {%elsif field.payloadFor %} - NanAssignPersistent(wrapper->{{ field.name }}, value); - {%elsif field.cppClassName == 'String' %} - if (wrapper->GetValue()->{{ field.name }}) { - //free(wrapper->{{ field.name }}); - } - - String::Utf8Value str(value); - wrapper->GetValue()->{{ field.name }} = strdup(*str); - {%elsif field.isCppClassIntType%} - if (value->IsNumber()) { - wrapper->GetValue()->{{ field.name }} = 
value->{{field.cppClassName}}Value(); - } - {%else%} - if (value->IsNumber()) { - wrapper->GetValue()->{{ field.name }} = ({{ field.cType }}) value->Int32Value(); - } - {%endif%} -} - - {%if field.isCallbackFunction %} -{{ field.returnType }} {{ cppClassName }}::{{ field.name }}_cppCallback ( - {%each field.args|argsInfo as arg%} - {{ arg.cType }} {{ arg.name}}{%if not arg.lastArg %},{%endif%} - - {%endeach%} - ) { - {{ field.name|titleCase }}Baton* baton = new {{ field.name|titleCase }}Baton(); - - {%each field.args|argsInfo as arg %} - baton->{{ arg.name }} = {{ arg.name }}; - {%endeach%} - baton->req.data = baton; - baton->done = false; - - uv_queue_work(uv_default_loop(), &baton->req, {{ field.name }}_asyncWork, {{ field.name }}_asyncAfter); - - while(!baton->done) { - this_thread::sleep_for(chrono::milliseconds(1)); - } - - {%each field|returnsInfo true false as _return %} - *{{ _return.name }} = *baton->{{ _return.name }}; - {%endeach%} - - return baton->result; -} - -void {{ cppClassName }}::{{ field.name }}_asyncWork(uv_work_t* req) { - // We aren't doing any work on a seperate thread, just need to - // access the main node thread in the async after method. 
- // However, this worker method is still needed -} - -void {{ cppClassName }}::{{ field.name }}_asyncAfter(uv_work_t* req, int status) { - NanScope(); - - {{ field.name|titleCase }}Baton* baton = static_cast<{{ field.name|titleCase }}Baton*>(req->data); - {{ cppClassName }}* instance = static_cast<{{ cppClassName }}*>(baton->payload); - - if (instance->{{ field.name }}->IsEmpty()) { - {%if field.returnType == "int" %} - baton->result = {{ field.returnNoResults }}; // no results acquired - {%endif%} - baton->done = true; - return; - } - - Local argv[{{ field.args|jsArgsCount }}] = { - {%each field.args|argsInfo as arg %} - {%if arg.name == "payload" %} - {%-- payload is always the last arg --%} - NanNew(instance->{{ fields|payloadFor field.name }}) - {%elsif arg.isJsArg %} - {%if arg.isEnum %} - NanNew((int)baton->{{ arg.name }}), - {%elsif arg.isLibgitType %} - NanNew({{ arg.cppClassName }}::New(&baton->{{ arg.name }}, false)), - {%elsif arg.cType == "size_t" %} - // HACK: NAN should really have an overload for NanNew to support size_t - NanNew((unsigned int)baton->{{ arg.name }}), - {%else%} - NanNew(baton->{{ arg.name }}), - {%endif%} - {%endif%} - {%endeach%} - }; - - TryCatch tryCatch; - Handle result = instance->{{ field.name }}->Call({{ field.args|jsArgsCount }}, argv); - - if (result->IsObject() && result->ToObject()->Has(NanNew("then"))) { - Handle thenProp = result->ToObject()->Get(NanNew("then")); - - if (thenProp->IsFunction()) { - // we can be reasonbly certain that the result is a promise - Local promise = result->ToObject(); - - NanAssignPersistent(baton->promise, promise); - - uv_queue_work(uv_default_loop(), &baton->req, {{ field.name }}_asyncWork, {{ field.name }}_asyncPromisePolling); - return; - } - } - - {{ field.returnType }} resultStatus; - - {%each field|returnsInfo true false as _return%} - if (result.IsEmpty() || result->IsNativeError()) { - baton->result = {{ field.returnError }}; - } - else if (!result->IsNull() && 
!result->IsUndefined()) { - {{ _return.cppClassName }}* wrapper = ObjectWrap::Unwrap<{{ _return.cppClassName }}>(result->ToObject()); - wrapper->selfFreeing = false; - - baton->{{ _return.name }} = wrapper->GetRefValue(); - baton->result = {{ field.returnSuccess }}; - } - else { - baton->result = {{ field.returnNoResults }}; - } - {%endeach%} - baton->done = true; -} - -void {{ cppClassName }}::{{ field.name }}_asyncPromisePolling(uv_work_t* req, int status) { - NanScope(); - - {{ field.name|titleCase }}Baton* baton = static_cast<{{ field.name|titleCase }}Baton*>(req->data); - Local promise = NanNew(baton->promise); - NanCallback* isPendingFn = new NanCallback(promise->Get(NanNew("isPending")).As()); - Local argv[1]; // MSBUILD won't assign an array of length 0 - Local isPending = isPendingFn->Call(0, argv)->ToBoolean(); - - if (isPending->Value()) { - uv_queue_work(uv_default_loop(), &baton->req, {{ field.name }}_asyncWork, {{ field.name }}_asyncPromisePolling); - return; - } - - NanCallback* isFulfilledFn = new NanCallback(promise->Get(NanNew("isFulfilled")).As()); - Local isFulfilled = isFulfilledFn->Call(0, argv)->ToBoolean(); - - if (isFulfilled->Value()) { - NanCallback* resultFn = new NanCallback(promise->Get(NanNew("value")).As()); - Handle result = resultFn->Call(0, argv); - {{ field.returnType }} resultStatus; - - {%each field|returnsInfo true false as _return%} - if (result.IsEmpty() || result->IsNativeError()) { - baton->result = {{ field.returnError }}; - } - else if (!result->IsNull() && !result->IsUndefined()) { - {{ _return.cppClassName }}* wrapper = ObjectWrap::Unwrap<{{ _return.cppClassName }}>(result->ToObject()); - wrapper->selfFreeing = false; - - baton->{{ _return.name }} = wrapper->GetRefValue(); - baton->result = {{ field.returnSuccess }}; - } - else { - baton->result = {{ field.returnNoResults }}; - } - {%endeach%} - baton->done = true; - } - else { - // promise was rejected - baton->result = {{ field.returnError }}; - baton->done = false; 
- } -} - {%endif%} - {%endif%} -{%endeach%} diff --git a/generate/partials/fields.cc b/generate/partials/fields.cc deleted file mode 100644 index 45e17503c..000000000 --- a/generate/partials/fields.cc +++ /dev/null @@ -1,15 +0,0 @@ -{%each fields|fieldsInfo as field %} - {%if not field.ignore %} - -NAN_METHOD({{ cppClassName }}::{{ field.cppFunctionName }}) { - NanScope(); - Handle to; - - {{ field.cType }} {%if not field.cppClassName|isV8Value %}*{%endif%}{{ field.name }} = - {%if not field.cppClassName|isV8Value %}&{%endif%}ObjectWrap::Unwrap<{{ cppClassName }}>(args.This())->GetValue()->{{ field.name }}; - - {%partial convertToV8 field %} - NanReturnValue(to); -} - {%endif%} -{%endeach%} diff --git a/generate/scripts/generateJson.js b/generate/scripts/generateJson.js new file mode 100644 index 000000000..35430bff4 --- /dev/null +++ b/generate/scripts/generateJson.js @@ -0,0 +1,222 @@ +const fs = require("fs"); +const path = require("path"); +const helpers = require("./helpers"); +const utils = require("./utils"); +const _ = require("lodash"); + +var version = require("../../package.json").libgit2.version; +var libgit2 = require("../input/v" + version + ".json"); +var descriptor = require("../input/descriptor.json"); + var supplement = require("../input/libgit2-supplement.json"); + +module.exports = (function generateJson() { + libgit2.types.forEach(function(type) { + if (supplement.types[type[0]]){ + _.merge(type[1], supplement.types[type[0]]); + } + }); + + // libgit2's docs aren't complete so we'll add in what they're missing here + Array.prototype.push.apply(libgit2.types, supplement.new.types); + Array.prototype.push.apply(libgit2.groups, supplement.new.groups); + + var output = []; + var dependencyLookup = {}; + var types = []; + var enums = []; + + // reduce all of the groups into a hashmap and a name array for easy lookup + var groups = libgit2.groups.reduce(function(memo, group) { + var groupName = group[0]; + + // Some functions are in the wrong group 
so we can't just ignore them. + // We have to completely remove them from one group and manually add them + // into the other. + var functionNames = group[1].filter(function(fnName) { + return !supplement.remove[groupName] || + !supplement.remove[groupName].functions || + !~supplement.remove[groupName].functions.indexOf(fnName); + }); + + memo[groupName] = functionNames; + return memo; + }, {}); + + + // Split each type from the array into classes/structs and enums + // each entry is of type ['name', {definingobject}] + libgit2.types.forEach(function(current) { + current[1].typeName = current[0]; + + // just log these out to a file for fun + if (current[1].type === "enum") { + enums.push(current[1]); + } + else { + types.push(current[1]); + } + }); + + var previous = ""; + enums = _(enums).sortBy("name").reduce(function(enumMemo, enumerable) { + if (previous == enumerable.typeName) { + if (process.env.BUILD_ONLY) { + console.warn('Duplicate definition for enum ' + enumerable.typeName + + ". skipped."); + } + } + else if (!enumerable.fields) { + if (process.env.BUILD_ONLY) { + console.warn('Incomplete definition for enum ' + enumerable.typeName + + ". 
skipped."); + } + } + else { + enumMemo[enumerable.typeName] = { + typeName: enumerable.typeName.replace(/^git_/, "").replace(/_t$/, ""), + type: "enum", + cType: enumerable.typeName, + isMask: (/_t$/).test(enumerable.typeName), + values: enumerable.fields.map(function(field) { + return { + name: field.name, + value: field.value + } + }) + }; + } + + previous = enumerable.typeName; + return enumMemo; + }, {}).valueOf(); + + // decorate the definitions with required data to build the C++ files + types.forEach(function(typeDef) { + var typeName = typeDef.typeName; + typeDef.cType = typeName; + typeName = typeName.replace("git_", ""); + typeDef.typeName = typeName; + dependencyLookup[typeName] = typeName; + + typeDef.functions = groups[typeName] || []; + helpers.decoratePrimaryType(typeDef, enums); + + groups[typeName] = false; + + typeDef.type = typeDef.hasConstructor ? "struct" : "class"; + + output.push(typeDef); + }); + + // Loop over the groups in case we missed anything (eg the types are missing in the docs); + for (var groupName in groups) { + var groupDef = groups[groupName]; + if (groupDef === false) { + continue; + } + + groupDef = { + functions: groupDef + }; + + groupDef.type = "class"; + groupDef.cType = (descriptor.types[groupName] || {}).cType || groupDef.cType; + + groupDef.typeName = groupName; + dependencyLookup[groupName] = groupName; + helpers.decoratePrimaryType(groupDef, enums); + + output.push(groupDef); + } + + // Calculate dependencies + output.forEach(function (def) { + if (def.ignore) { + return; + } + + var dependencies = {}; + var addDependencies = function (prop) { + if (prop.ignore) { + return; + } + + var type = helpers.normalizeCtype(prop.type || prop.cType).replace("git_", ""); + var dependencyFilename = dependencyLookup[type]; + + if (dependencyFilename) { + dependencies[dependencyFilename] = dependencyFilename; + } + + (prop.args || []).forEach(addDependencies); + + if (prop.return) { + addDependencies(prop.return); + } + }; + + 
def.fields.forEach(addDependencies); + def.functions.forEach(addDependencies); + + Object.keys(dependencies).forEach(function (dependencyFilename) { + def.dependencies.push("../include/" + dependencyFilename + ".h"); + }); + + // Additionally provide a friendly name to the actual filename. + def.name = path.basename(def.filename, ".h"); + + def.functions.forEach(function(fn) { + fn.cppClassName = def.cppClassName; + }); + }); + + // Process enums + _(enums).forEach(function(enumerable) { + output.some(function(obj) { + if (enumerable.typeName.indexOf(obj.typeName) == 0) { + enumerable.owner = obj.jsClassName; + } + else if (enumerable.owner) { + return true; + } + }); + + var override = descriptor.enums[enumerable.typeName] || {}; + + enumerable.owner = override.owner || enumerable.owner || "Enums"; + + enumerable.JsName = enumerable.typeName + .replace(new RegExp("^" + enumerable.owner.toLowerCase()), "") + .replace(/^_/, "") + .toUpperCase(); + + enumerable.values.forEach(function(value) { + value.JsName = value.name + .replace(/^GIT_/, "") + .replace(override.removeString || "", "") + .replace(new RegExp("^" + enumerable.owner.toUpperCase()), "") + .replace(/^_/, "") + .replace(new RegExp("^" + enumerable.JsName), "") + .replace(/^_/, "") + .toUpperCase(); + + if (override.values && override.values[value.name]) { + _.merge(value, override.values[value.name]); + } + }); + + _.merge(enumerable, _.omit(override, ["values"])); + + output.push(enumerable); + }); + + output = _.sortBy(output, "typeName"); + + if (process.argv[2] != "--documentation") { + helpers.filterDocumentation(output); + } + + + utils.writeFile("output/idefs.json", output); + +}()); diff --git a/generate/scripts/generateMissingTests.js b/generate/scripts/generateMissingTests.js new file mode 100644 index 000000000..1b6c7aae2 --- /dev/null +++ b/generate/scripts/generateMissingTests.js @@ -0,0 +1,74 @@ +const path = require("path"); +const Promise = require("nodegit-promise"); +const promisify = 
require("promisify-node"); +const fse = promisify(require("fs-extra")); +const utils = require("./utils"); + +const testFilesPath = "../test/tests"; +const idefs = require("../output/idefs"); +const missingFileIgnores = require("../input/ignored-missing-tests"); + +module.exports = (function generateMissingTests() { + var output = {}; + + function findMissingTest(idef) { + return new Promise(function(resolve, reject) { + var testFilePath = path.join(testFilesPath, idef.filename + ".js"); + var result = {}; + + var file = utils.readFile(testFilePath); + if (file) { + var fieldsResult = []; + var functionsResult = []; + var fieldIgnores = (missingFileIgnores[idef.filename] || {}).fields; + var functionIgnores = (missingFileIgnores[idef.filename] || {}).functions; + + fieldIgnores = fieldIgnores || []; + functionIgnores = functionIgnores || []; + file = file || ""; + + idef.fields.forEach(function(field) { + if (file.indexOf(field.jsFunctionName) < 0 + && fieldIgnores.indexOf(field.jsFunctionName < 0)) { + fieldsResult.push(field.jsFunctionName); + } + }); + + result.fields = fieldsResult; + + idef.functions.forEach(function(fn) { + if (file.indexOf(fn.jsFunctionName) < 0 + && functionIgnores.indexOf(fn.jsFunctionName) < 0) { + functionsResult.push(fn.jsFunctionName); + } + }); + + result.functions = functionsResult; + } + else { + result.testFileMissing = false; + result.testFilePath = testFilePath; + } + + output[idef.filename] = result; + resolve(); + }); + }; + + var promises = []; + + idefs.forEach(function(idef) { + promises.push(findMissingTest(idef)); + }); + + Promise.all(promises).then( + function() { + utils.writeFile("output/missing-tests.json", output); + }, + function(fail) { + console.log(fail); + } + ); + +}()); + diff --git a/generate/scripts/generateNativeCode.js b/generate/scripts/generateNativeCode.js new file mode 100644 index 000000000..c277e0e9f --- /dev/null +++ b/generate/scripts/generateNativeCode.js @@ -0,0 +1,146 @@ +const path = 
require("path"); +const combyne = require("combyne"); +const promisify = require("promisify-node"); +const fse = promisify(require("fs-extra")); +const js_beautify = require("js-beautify").js_beautify; +const beautify = function (input) { + return js_beautify(input, { + "brace_style": "end-expand", + "max_preserve_newlines": 2, + "preserve_newlines": true, + "indent_size": 2, + "indent_char": " " + }); +} +const exec = promisify(function(command, opts, callback) { + return require("child_process").exec(command, opts, callback); +}); + + +const utils = require("./utils"); +const idefs = require("../output/idefs"); + +module.exports = (function generateNativeCode() { + // Customize the delimiters so as to not process `{{{` or `}}}`. + combyne.settings.delimiters = { + START_RAW: "{{=", + END_RAW: "=}}" + }; + + var partials = { + asyncFunction: utils.readFile("combyne/partials/async_function.cc"), + convertFromV8: utils.readFile("combyne/partials/convert_from_v8.cc"), + convertToV8: utils.readFile("combyne/partials/convert_to_v8.cc"), + doc: utils.readFile("combyne/partials/doc.cc"), + fields: utils.readFile("combyne/partials/fields.cc"), + guardArguments: utils.readFile("combyne/partials/guard_arguments.cc"), + syncFunction: utils.readFile("combyne/partials/sync_function.cc"), + fieldAccessors: utils.readFile("combyne/partials/field_accessors.cc") + }; + + var templates = { + class_content: utils.readFile("combyne/templates/class_content.cc"), + struct_content: utils.readFile("combyne/templates/struct_content.cc"), + class_header: utils.readFile("combyne/templates/class_header.h"), + struct_header: utils.readFile("combyne/templates/struct_header.h"), + binding: utils.readFile("combyne/templates/binding.gyp"), + nodegitCC: utils.readFile("combyne/templates/nodegit.cc"), + nodegitJS: utils.readFile("combyne/templates/nodegit.js"), + enums: utils.readFile("combyne/templates/enums.js") + }; + + var filters = { + upper: require("../combyne/filters/upper"), + replace: 
require("../combyne/filters/replace"), + titleCase: require("../combyne/filters/title_case"), + or: require("../combyne/filters/or"), + and: require("../combyne/filters/and"), + defaultValue: require("../combyne/filters/default_value"), + argsInfo: require("../combyne/filters/args_info"), + cppToV8: require("../combyne/filters/cpp_to_v8"), + jsArgsCount: require("../combyne/filters/js_args_count"), + isV8Value: require("../combyne/filters/is_v8_value"), + isPointer: require("../combyne/filters/is_pointer"), + isDoublePointer: require("../combyne/filters/is_double_pointer"), + isOid: require("../combyne/filters/is_oid"), + unPointer: require("../combyne/filters/un_pointer"), + payloadFor: require("../combyne/filters/payload_for"), + hasReturnType: require("../combyne/filters/has_return_type"), + hasReturns: require("../combyne/filters/has_returns"), + returnsCount: require("../combyne/filters/returns_count"), + returnsInfo: require("../combyne/filters/returns_info"), + fieldsInfo: require("../combyne/filters/fields_info") + }; + + // Convert Buffers to Combyne templates. + Object.keys(templates).forEach(function(template) { + templates[template] = combyne(templates[template]); + + // Attach all filters to all templates. + Object.keys(filters).forEach(function(filter) { + templates[template].registerFilter(filter, filters[filter]); + }); + }); + + // Attach all partials to select templates. + Object.keys(partials).forEach(function(partial) { + templates.class_content.registerPartial(partial, combyne(partials[partial])); + templates.struct_content.registerPartial(partial, combyne(partials[partial])); + }); + + + // Determine which definitions to actually include in the source code. 
+ // This might not be needed anymore but to be frank I'm not totally positive + var enabled = idefs.filter(function(idef) { + return !idef.ignore; + }); + + + fse.remove(path.resolve(__dirname, "../../src")).then(function() { + return fse.remove(path.resolve(__dirname, "../../include")); + }).then(function() { + return fse.copy(path.resolve(__dirname, "../combyne/manual/"), path.resolve(__dirname, "../../")); + }).then(function() { + // Write out single purpose templates. + utils.writeFile("../binding.gyp", beautify(templates.binding.render(enabled))); + utils.writeFile("../src/nodegit.cc", templates.nodegitCC.render(enabled)); + utils.writeFile("../lib/nodegit.js", beautify(templates.nodegitJS.render(enabled))); + // Write out all the classes. + enabled.forEach(function(idef) { + try { + if (idef.type && idef.type != "enum") { + utils.writeFile( + "../src/" + idef.filename + ".cc", templates[idef.type + "_content"].render(idef) + ); + utils.writeFile( + "../include/" + idef.filename + ".h", templates[idef.type + "_header"].render(idef) + ); + } + } + catch (e) { + if (process.env.BUILD_ONLY) { + console.log(e); + } + } + }); + + utils.writeFile("../lib/enums.js", beautify(templates.enums.render(enabled))); + }).then(function() { + return exec("command -v astyle").then(function(astyle) { + if (astyle) { + return exec( + "astyle --options=\".astylerc\" " + + path.resolve(__dirname, "../../src") + "/*.cc " + + path.resolve(__dirname, "../../include") + "/*.h" + ).then(function() { + return exec( + "rm " + + path.resolve(__dirname, "../../src") + "/*.cc.orig " + + path.resolve(__dirname, "../../include") + "/*.h.orig " + ); + }); + } + }) + }); + +}()); diff --git a/generate/utils.js b/generate/scripts/helpers.js similarity index 63% rename from generate/utils.js rename to generate/scripts/helpers.js index 24f612d8b..2386461bd 100644 --- a/generate/utils.js +++ b/generate/scripts/helpers.js @@ -1,13 +1,15 @@ -var pointerRegex = /\s*\*\s*/; -var doublePointerRegex = 
/\s*\*\*\s*/; + var callbackTypePattern = /\s*_cb/; + +var utils = require("./utils"); var _ = require("lodash"); // TODO: When libgit2's docs include callbacks we should be able to remove this -var callbackDefs = require("./callbacks.json"); -var descriptor = require("./descriptor.json"); -var version = require("../package.json").libgit2.version; -var libgit2 = require("./v" + version + ".json"); +var version = require("../../package.json").libgit2.version; +var callbackDefs = require("../input/callbacks.json"); +var descriptor = require("../input/descriptor.json"); +var libgit2 = require("../input/v" + version + ".json"); + var cTypes = libgit2.groups.map(function(group) { return group[0];}); var cTypeMappings = { @@ -27,61 +29,35 @@ var collisionMappings = { "new": "create" } -var Utils = { - titleCase: function(str) { - return str.split(/_|\//).map(function(val, index) { - if (val.length) { - return val[0].toUpperCase() + val.slice(1); - } - - return val; - }).join(""); - }, - - camelCase: function(str) { - return str.split(/_|\//).map(function(val, index) { - return (index >= 1 - ? 
val[0].toUpperCase() + val.slice(1) - : val[0].toLowerCase() + val.slice(1)); - }).join(""); - }, - - isPointer: function(cType) { - return pointerRegex.test(cType) || doublePointerRegex.test(cType); - }, - - isDoublePointer: function(cType) { - return doublePointerRegex.test(cType); - }, - +var Helpers = { normalizeCtype: function(cType) { return (cType || "") .toLowerCase() .replace("const ", "") .replace("unsigned ", "") .replace("struct", "") - .replace(doublePointerRegex, "") - .replace(pointerRegex, "") + .replace(utils.doublePointerRegex, "") + .replace(utils.pointerRegex, "") .trim(); }, cTypeToCppName: function(cType, ownerType) { - var normalizedType = Utils.normalizeCtype(cType); + var normalizedType = Helpers.normalizeCtype(cType); if (ownerType && normalizedType != ownerType) { normalizedType = normalizedType.replace(ownerType, ""); } - return cTypeMappings[normalizedType] || Utils.titleCase(normalizedType); + return cTypeMappings[normalizedType] || utils.titleCase(normalizedType); }, cTypeToJsName: function(cType, ownerType) { - var output = Utils.camelCase(Utils.cTypeToCppName(cType, ownerType).replace(/^Git/, "")); + var output = utils.camelCase(Helpers.cTypeToCppName(cType, ownerType).replace(/^Git/, "")); var mergedPrefixes = ["from", "by"]; mergedPrefixes.forEach(function(prefix) { - var reg = new RegExp("(^" + prefix + "|" + Utils.titleCase(prefix) + ")([a-z]+)$"); + var reg = new RegExp("(^" + prefix + "|" + utils.titleCase(prefix) + ")([a-z]+)$"); output = output.replace(reg, function(all, prefixMatch, otherWord) { - return prefixMatch + Utils.titleCase(otherWord); + return prefixMatch + utils.titleCase(otherWord); }); }); @@ -102,7 +78,7 @@ var Utils = { return type.used && type.used.needs && type.used.needs.some(function (fnName) { - return Utils.isConstructorFunction(normalizedType, fnName); + return Helpers.isConstructorFunction(normalizedType, fnName); }); }, @@ -112,7 +88,7 @@ var Utils = { isPayloadFor: function(cbField, payloadName) { 
return ~payloadName.indexOf("_payload") - && Utils.isCallbackFunction(cbField.cType) + && Helpers.isCallbackFunction(cbField.cType) && ~cbField.name.indexOf(payloadName.replace("_payload", "")); }, @@ -150,7 +126,7 @@ var Utils = { var cbFieldName; allFields.some(function (cbField) { - if (Utils.isPayloadFor(cbField, field.name)) { + if (Helpers.isPayloadFor(cbField, field.name)) { cbFieldName = cbField.name; return true; } @@ -163,13 +139,13 @@ var Utils = { }, decorateLibgitType: function(type, types, enums) { - var normalizedType = Utils.normalizeCtype(type.cType); - var libgitType = Utils.getLibgitType(normalizedType, types); + var normalizedType = Helpers.normalizeCtype(type.cType); + var libgitType = Helpers.getLibgitType(normalizedType, types); if (libgitType) { type.isLibgitType = true; type.isEnum = libgitType.type === "enum"; - type.hasConstructor = Utils.hasConstructor(type, normalizedType); + type.hasConstructor = Helpers.hasConstructor(type, normalizedType); // there are no enums at the struct level currently, but we still need to override function args if (type.isEnum) { @@ -188,8 +164,8 @@ var Utils = { var partialOverrides = _.omit(typeDefOverrides, ["fields", "functions"]); typeDef.cType = typeDef.cType || null; - typeDef.cppClassName = Utils.cTypeToCppName(typeDef.cType || "git_" + typeDef.typeName); - typeDef.jsClassName = Utils.titleCase(Utils.cTypeToJsName(typeDef.cType || "git_" + typeDef.typeName)); + typeDef.cppClassName = Helpers.cTypeToCppName(typeDef.cType || "git_" + typeDef.typeName); + typeDef.jsClassName = utils.titleCase(Helpers.cTypeToJsName(typeDef.cType || "git_" + typeDef.typeName)); typeDef.filename = typeDef.typeName; typeDef.isLibgitType = true; typeDef.dependencies = []; @@ -197,13 +173,13 @@ var Utils = { typeDef.fields = typeDef.fields || []; typeDef.fields.forEach(function (field, index, allFields) { var fieldOverrides = typeDefOverrides.fields || {}; - Utils.decorateField(field, allFields, fieldOverrides[field.name] || 
{}, enums); + Helpers.decorateField(field, allFields, fieldOverrides[field.name] || {}, enums); }); typeDef.needsForwardDeclaration = typeDef.decl === typeDef.cType; - var normalizedType = Utils.normalizeCtype(typeDef.cType); - typeDef.hasConstructor = Utils.hasConstructor(typeDef, normalizedType); + var normalizedType = Helpers.normalizeCtype(typeDef.cType); + typeDef.hasConstructor = Helpers.hasConstructor(typeDef, normalizedType); typeDef.functions = (typeDef.functions).map(function(fn) { var fnDef = libgit2.functions[fn]; @@ -214,58 +190,58 @@ var Utils = { var typeDefOverrides = descriptor.types[typeDef.typeName] || {}; var functionOverrides = typeDefOverrides.functions || {}; typeDef.functions.forEach(function(fnDef) { - Utils.decorateFunction(fnDef, typeDef, functionOverrides[fnDef.cFunctionName] || {}, enums); + Helpers.decorateFunction(fnDef, typeDef, functionOverrides[fnDef.cFunctionName] || {}, enums); }); _.merge(typeDef, partialOverrides); }, decorateField: function(field, allFields, fieldOverrides, enums) { - var normalizeType = Utils.normalizeCtype(field.type); + var normalizeType = Helpers.normalizeCtype(field.type); field.cType = field.type; - field.cppFunctionName = Utils.titleCase(field.name); - field.jsFunctionName = Utils.camelCase(field.name); - field.cppClassName = Utils.cTypeToCppName(field.type); - field.jsClassName = Utils.titleCase(Utils.cTypeToJsName(field.type)); + field.cppFunctionName = utils.titleCase(field.name); + field.jsFunctionName = utils.camelCase(field.name); + field.cppClassName = Helpers.cTypeToCppName(field.type); + field.jsClassName = utils.titleCase(Helpers.cTypeToJsName(field.type)); - if (Utils.isCallbackFunction(field.cType)) { - Utils.processCallback(field); + if (Helpers.isCallbackFunction(field.cType)) { + Helpers.processCallback(field); var argOverrides = fieldOverrides.args || {}; field.args = field.args || []; field.args.forEach(function (arg) { - Utils.decorateArg(arg, null, null, argOverrides[arg.name] || {}, 
enums); + Helpers.decorateArg(arg, null, null, argOverrides[arg.name] || {}, enums); }); } else { field.isCallbackFunction = false; - Utils.processPayload(field, allFields); + Helpers.processPayload(field, allFields); if (field.payloadFor) { return; } } - Utils.decorateLibgitType(field, libgit2.types, enums); + Helpers.decorateLibgitType(field, libgit2.types, enums); _.merge(field, fieldOverrides); }, decorateArg: function(arg, typeDef, fnDef, argOverrides, enums) { var type = arg.cType || arg.type; - var normalizedType = Utils.normalizeCtype(type); + var normalizedType = Helpers.normalizeCtype(type); arg.cType = type; - arg.cppClassName = Utils.cTypeToCppName(arg.cType); - arg.jsClassName = Utils.titleCase(Utils.cTypeToJsName(arg.cType)); + arg.cppClassName = Helpers.cTypeToCppName(arg.cType); + arg.jsClassName = utils.titleCase(Helpers.cTypeToJsName(arg.cType)); - Utils.decorateLibgitType(arg, libgit2.types, enums); + Helpers.decorateLibgitType(arg, libgit2.types, enums); if (typeDef && fnDef) { // Mark all of the args that are either returns or are the object // itself and determine if this function goes on the prototype // or is a constructor method. 
- arg.isReturn = arg.name === "out" || (Utils.isDoublePointer(arg.type) && normalizedType == typeDef.cType); - arg.isSelf = Utils.isPointer(arg.type) && normalizedType == typeDef.cType; + arg.isReturn = arg.name === "out" || (utils.isDoublePointer(arg.type) && normalizedType == typeDef.cType); + arg.isSelf = utils.isPointer(arg.type) && normalizedType == typeDef.cType; if (arg.isReturn && fnDef.return && fnDef.return.type === "int") { fnDef.return.isErrorCode = true; @@ -296,8 +272,8 @@ var Utils = { return; } - fnDef.cppFunctionName = Utils.cTypeToCppName(key, "git_" + typeDef.typeName); - fnDef.jsFunctionName = Utils.cTypeToJsName(key, "git_" + typeDef.typeName); + fnDef.cppFunctionName = Helpers.cTypeToCppName(key, "git_" + typeDef.typeName); + fnDef.jsFunctionName = Helpers.cTypeToJsName(key, "git_" + typeDef.typeName); //fnDef.isAsync = false; // until proven otherwise if (fnDef.cppFunctionName == typeDef.cppClassName) { @@ -306,20 +282,20 @@ var Utils = { var argOverrides = fnOverrides.args || {}; fnDef.args.forEach(function(arg) { - Utils.decorateArg(arg, typeDef, fnDef, argOverrides[arg.name] || {}, enums); + Helpers.decorateArg(arg, typeDef, fnDef, argOverrides[arg.name] || {}, enums); }); if (fnDef.return) { - Utils.decorateArg(fnDef.return, typeDef, fnDef, fnOverrides.return || {}, enums); + Helpers.decorateArg(fnDef.return, typeDef, fnDef, fnOverrides.return || {}, enums); } _(collisionMappings).forEach(function(newName, collidingName) { - if (fnDef.cppFunctionName == Utils.titleCase(collidingName)) { - fnDef.cppFunctionName = Utils.titleCase(newName); + if (fnDef.cppFunctionName == utils.titleCase(collidingName)) { + fnDef.cppFunctionName = utils.titleCase(newName); } - if (fnDef.jsFunctionName == Utils.camelCase(collidingName)) { - fnDef.jsFunctionName = Utils.camelCase(newName); + if (fnDef.jsFunctionName == utils.camelCase(collidingName)) { + fnDef.jsFunctionName = utils.camelCase(newName); } }); @@ -354,17 +330,17 @@ var Utils = { }, 
filterDocumentation: function(idefs) { - Utils.filterIgnored(idefs, function (idef) { - Utils.deleteProperties(idef); + Helpers.filterIgnored(idefs, function (idef) { + Helpers.deleteProperties(idef); - Utils.filterIgnored(idef.fields, Utils.deleteProperties); + Helpers.filterIgnored(idef.fields, Helpers.deleteProperties); - Utils.filterIgnored(idef.functions, function (fn) { - Utils.deleteProperties(fn); + Helpers.filterIgnored(idef.functions, function (fn) { + Helpers.deleteProperties(fn); - Utils.filterIgnored(fn.args, function(arg) { - Utils.deleteProperties(arg); + Helpers.filterIgnored(fn.args, function(arg) { + Helpers.deleteProperties(arg); delete arg.functions; }); }); @@ -372,4 +348,4 @@ var Utils = { } }; -module.exports = Utils; +module.exports = Helpers; diff --git a/generate/scripts/utils.js b/generate/scripts/utils.js new file mode 100644 index 000000000..3395fe272 --- /dev/null +++ b/generate/scripts/utils.js @@ -0,0 +1,67 @@ +const promisify = require("promisify-node"); +const fse = require("fs-extra"); + +const fs = require("fs"); +const path = require("path"); + +// Make a locally bound path joiner, (bound to generate). 
+var local = path.join.bind(null, __dirname, "../"); + +var util = { + pointerRegex: /\s*\*\s*/, + doublePointerRegex: /\s*\*\*\s*/, + + readFile: function(file) { + try { + return fs.readFileSync(local(file)).toString(); + } + catch (unhandledException) { + return ""; + } + }, + + writeFile: function(file, content) { + try { + var file = local(file); + if (typeof content == "object") { + content = JSON.stringify(content, null, 2) + } + + fse.ensureFileSync(file); + fse.writeFileSync(file, content); + return true; + } + catch (exception) { + return false; + } + }, + + titleCase: function(str) { + return str.split(/_|\//).map(function(val, index) { + if (val.length) { + return val[0].toUpperCase() + val.slice(1); + } + + return val; + }).join(""); + }, + + camelCase: function(str) { + return str.split(/_|\//).map(function(val, index) { + return (index >= 1 + ? val[0].toUpperCase() + val.slice(1) + : val[0].toLowerCase() + val.slice(1)); + }).join(""); + }, + + isPointer: function(type) { + return util.pointerRegex.test(type) || util.doublePointerRegex.test(type); + }, + + isDoublePointer: function(type) { + return util.doublePointerRegex.test(type); + } + +}; + +module.exports = util; diff --git a/generate/setup.js b/generate/setup.js deleted file mode 100644 index 98cb3617f..000000000 --- a/generate/setup.js +++ /dev/null @@ -1,218 +0,0 @@ -const fs = require("fs"); -const path = require("path"); -const utils = require("./utils"); -const _ = require("lodash"); - -var version = require("../package.json").libgit2.version; -var descriptor = require("./descriptor.json"); -var libgit2 = require("./v" + version + ".json"); -var supplement = require("./libgit2-supplement.json"); - -libgit2.types.forEach(function(type) { - if (supplement.types[type[0]]){ - _.merge(type[1], supplement.types[type[0]]); - } -}); - -// libgit2's docs aren't complete so we'll add in what they're missing here -Array.prototype.push.apply(libgit2.types, supplement.new.types); 
-Array.prototype.push.apply(libgit2.groups, supplement.new.groups); - -var output = []; -var dependencyLookup = {}; -var types = []; -var enums = []; - -// reduce all of the groups into a hashmap and a name array for easy lookup -var groups = libgit2.groups.reduce(function(memo, group) { - var groupName = group[0]; - - // Some functions are in the wrong group so we can't just ignore them. - // We have to completely remove them from one group and manually add them - // into the other. - var functionNames = group[1].filter(function(fnName) { - return !supplement.remove[groupName] || - !supplement.remove[groupName].functions || - !~supplement.remove[groupName].functions.indexOf(fnName); - }); - - memo[groupName] = functionNames; - return memo; -}, {}); - - -// Split each type from the array into classes/structs and enums -// each entry is of type ['name', {definingobject}] -libgit2.types.forEach(function(current) { - current[1].typeName = current[0]; - - // just log these out to a file for fun - if (current[1].type === "enum") { - enums.push(current[1]); - } - else { - types.push(current[1]); - } -}); - -var previous = ""; -enums = _(enums).sortBy("name").reduce(function(enumMemo, enumerable) { - if (previous == enumerable.typeName) { - if (process.env.BUILD_ONLY) { - console.warn('Duplicate definition for enum ' + enumerable.typeName + - ". skipped."); - } - } - else if (!enumerable.fields) { - if (process.env.BUILD_ONLY) { - console.warn('Incomplete definition for enum ' + enumerable.typeName + - ". 
skipped."); - } - } - else { - enumMemo[enumerable.typeName] = { - typeName: enumerable.typeName.replace(/^git_/, "").replace(/_t$/, ""), - type: "enum", - cType: enumerable.typeName, - isMask: (/_t$/).test(enumerable.typeName), - values: enumerable.fields.map(function(field) { - return { - name: field.name, - value: field.value - } - }) - }; - } - - previous = enumerable.typeName; - return enumMemo; -}, {}).valueOf(); - -// decorate the definitions with required data to build the C++ files -types.forEach(function(typeDef) { - var typeName = typeDef.typeName; - typeDef.cType = typeName; - typeName = typeName.replace("git_", ""); - typeDef.typeName = typeName; - dependencyLookup[typeName] = typeName; - - typeDef.functions = groups[typeName] || []; - utils.decoratePrimaryType(typeDef, enums); - - groups[typeName] = false; - - typeDef.type = typeDef.hasConstructor ? "struct" : "class"; - - output.push(typeDef); -}); - -// Loop over the groups in case we missed anything (eg the types are missing in the docs); -for (var groupName in groups) { - var groupDef = groups[groupName]; - if (groupDef === false) { - continue; - } - - groupDef = { - functions: groupDef - }; - - groupDef.type = "class"; - groupDef.cType = (descriptor.types[groupName] || {}).cType || groupDef.cType; - - groupDef.typeName = groupName; - dependencyLookup[groupName] = groupName; - utils.decoratePrimaryType(groupDef, enums); - - output.push(groupDef); -} - -// Calculate dependencies -output.forEach(function (def) { - if (def.ignore) { - return; - } - - var dependencies = {}; - var addDependencies = function (prop) { - if (prop.ignore) { - return; - } - - var type = utils.normalizeCtype(prop.type || prop.cType).replace("git_", ""); - var dependencyFilename = dependencyLookup[type]; - - if (dependencyFilename) { - dependencies[dependencyFilename] = dependencyFilename; - } - - (prop.args || []).forEach(addDependencies); - - if (prop.return) { - addDependencies(prop.return); - } - }; - - 
def.fields.forEach(addDependencies); - def.functions.forEach(addDependencies); - - Object.keys(dependencies).forEach(function (dependencyFilename) { - def.dependencies.push("../include/" + dependencyFilename + ".h"); - }); - - // Additionally provide a friendly name to the actual filename. - def.name = path.basename(def.filename, ".h"); - - def.functions.forEach(function(fn) { - fn.cppClassName = def.cppClassName; - }); -}); - -// Process enums -_(enums).forEach(function(enumerable) { - output.some(function(obj) { - if (enumerable.typeName.indexOf(obj.typeName) == 0) { - enumerable.owner = obj.jsClassName; - } - else if (enumerable.owner) { - return true; - } - }); - - var override = descriptor.enums[enumerable.typeName] || {}; - - enumerable.owner = override.owner || enumerable.owner || "Enums"; - - enumerable.JsName = enumerable.typeName - .replace(new RegExp("^" + enumerable.owner.toLowerCase()), "") - .replace(/^_/, "") - .toUpperCase(); - - enumerable.values.forEach(function(value) { - value.JsName = value.name - .replace(/^GIT_/, "") - .replace(override.removeString || "", "") - .replace(new RegExp("^" + enumerable.owner.toUpperCase()), "") - .replace(/^_/, "") - .replace(new RegExp("^" + enumerable.JsName), "") - .replace(/^_/, "") - .toUpperCase(); - - if (override.values && override.values[value.name]) { - _.merge(value, override.values[value.name]); - } - }); - - _.merge(enumerable, _.omit(override, ["values"])); - - output.push(enumerable); -}); - -output = _.sortBy(output, "typeName"); - -if (process.argv[2] != "--documentation") { - utils.filterDocumentation(output); -} - -fs.writeFileSync(path.join(__dirname, "idefs.json"), - JSON.stringify(output, null, 2)); diff --git a/generate/templates/class_content.cc b/generate/templates/class_content.cc deleted file mode 100644 index a4be2bf63..000000000 --- a/generate/templates/class_content.cc +++ /dev/null @@ -1,130 +0,0 @@ -// This is a generated file, modify: generate/templates/class_content.cc. 
-#include -#include - -extern "C" { -#include -{%each cDependencies as dependency %} -#include <{{ dependency }}> -{%endeach%} -} - - -#include "../include/functions/copy.h" -#include "../include/macros.h" -#include "../include/{{ filename }}.h" - -{%each dependencies as dependency%} -#include "{{ dependency }}" -{%endeach%} - -#include - -using namespace std; -using namespace v8; -using namespace node; - -{%if cType%} -{{ cppClassName }}::{{ cppClassName }}({{ cType }} *raw, bool selfFreeing) { - this->raw = raw; - this->selfFreeing = selfFreeing; -} - -{{ cppClassName }}::~{{ cppClassName }}() { - {%if freeFunctionName%} - if (this->selfFreeing) { - {{ freeFunctionName }}(this->raw); - } - {%endif%} -} - -void {{ cppClassName }}::InitializeComponent(Handle target) { - NanScope(); - - Local tpl = NanNew(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(NanNew("{{ jsClassName }}")); - - {%each functions as function%} - {%if not function.ignore%} - {%if function.isPrototypeMethod%} - NODE_SET_PROTOTYPE_METHOD(tpl, "{{ function.jsFunctionName }}", {{ function.cppFunctionName }}); - {%else%} - NODE_SET_METHOD(tpl, "{{ function.jsFunctionName }}", {{ function.cppFunctionName }}); - {%endif%} - {%endif%} - {%endeach%} - - {%each fields as field%} - {%if not field.ignore%} - NODE_SET_PROTOTYPE_METHOD(tpl, "{{ field.jsFunctionName }}", {{ field.cppFunctionName }}); - {%endif%} - {%endeach%} - - Local _constructor_template = tpl->GetFunction(); - NanAssignPersistent(constructor_template, _constructor_template); - target->Set(NanNew("{{ jsClassName }}"), _constructor_template); -} - -NAN_METHOD({{ cppClassName }}::New) { - NanScope(); - - if (args.Length() == 0 || !args[0]->IsExternal()) { - {%if createFunctionName%} - return NanThrowError("A new {{ cppClassName }} cannot be instantiated. 
Use {{ jsCreateFunctionName }} instead."); - {%else%} - return NanThrowError("A new {{ cppClassName }} cannot be instantiated."); - {%endif%} - } - - {{ cppClassName }}* object = new {{ cppClassName }}(static_cast<{{ cType }} *>(Handle::Cast(args[0])->Value()), args[1]->BooleanValue()); - object->Wrap(args.This()); - - NanReturnValue(args.This()); -} - -Handle {{ cppClassName }}::New(void *raw, bool selfFreeing) { - NanEscapableScope(); - Handle argv[2] = { NanNew((void *)raw), NanNew(selfFreeing) }; - return NanEscapeScope(NanNew({{ cppClassName }}::constructor_template)->NewInstance(2, argv)); -} - -{{ cType }} *{{ cppClassName }}::GetValue() { - return this->raw; -} - -{{ cType }} **{{ cppClassName }}::GetRefValue() { - return &this->raw; -} -{%else%} -void {{ cppClassName }}::InitializeComponent(Handle target) { - NanScope(); - - Local object = NanNew(); - - {%each functions as function%} - {%if not function.ignore%} - NODE_SET_METHOD(object, "{{ function.jsFunctionName }}", {{ function.cppFunctionName }}); - {%endif%} - {%endeach%} - - target->Set(NanNew("{{ jsClassName }}"), object); -} -{%endif%} - -{%each functions as function %} - {%if not function.ignore%} - {%if function.isAsync%} - {%partial asyncFunction function %} - {%else%} - {%partial syncFunction function %} - {%endif%} - {%endif%} -{%endeach%} - -{%partial fields .%} - -{%if not cTypeIsUndefined %} -Persistent {{ cppClassName }}::constructor_template; -{%endif%} diff --git a/generate/templates/enums.js b/generate/templates/enums.js deleted file mode 100644 index 210ddcf91..000000000 --- a/generate/templates/enums.js +++ /dev/null @@ -1,16 +0,0 @@ -var NodeGit = require("../"); -NodeGit.Enums = {}; - -/* jshint ignore:start */ -{% each . 
as enumerable %} -{% if enumerable.type == "enum" %} -NodeGit.{{ enumerable.owner }}.{{ enumerable.JsName }} = { -{% each enumerable.values as value %} - {{ value.JsName }}: {{ value.value }}, -{% endeach %} -}; - -{% endif %} -{% endeach %} - -/* jshint ignore:end */ diff --git a/generate/templates/struct_header.h b/generate/templates/struct_header.h deleted file mode 100644 index 006dd96d7..000000000 --- a/generate/templates/struct_header.h +++ /dev/null @@ -1,85 +0,0 @@ -#ifndef {{ cppClassName|upper }}_H -#define {{ cppClassName|upper }}_H -// generated from struct_header.h -#include -#include - -extern "C" { -#include -{%each cDependencies as dependency %} -#include <{{ dependency }}> -{%endeach%} -} - -{%each dependencies as dependency%} -#include "{{ dependency }}" -{%endeach%} - -using namespace node; -using namespace v8; - -class {{ cppClassName }} : public ObjectWrap { - public: - {{ cppClassName }}({{ cType }}* raw, bool selfFreeing); - static Persistent constructor_template; - static void InitializeComponent (Handle target); - - {{ cType }} *GetValue(); - {{ cType }} **GetRefValue(); - - static Handle New(void *raw, bool selfFreeing); - - bool selfFreeing; - - {%each fields as field %} - {%if not field.ignore %} - {%if field.isCallbackFunction %} - static {{ field.returnType }} {{ field.name }}_cppCallback ( - {%each field.args|argsInfo as arg %} - {{ arg.cType }} {{ arg.name}}{%if not arg.lastArg %},{%endif%} - {%endeach%} - ); - static void {{ field.name }}_asyncWork(uv_work_t* req); - static void {{ field.name }}_asyncAfter(uv_work_t* req, int status); - static void {{ field.name }}_asyncPromisePolling(uv_work_t* req, int status); - struct {{ field.name|titleCase }}Baton { - {%each field.args|argsInfo as arg %} - {{ arg.cType }} {{ arg.name}}; - {%endeach%} - uv_work_t req; - {{ field.returnType }} result; - Persistent promise; - bool done; - }; - {%endif%} - {%endif%} - {%endeach%} - - private: - {{ cppClassName }}(); - ~{{ cppClassName }}(); - - 
void ConstructFields(); - - static NAN_METHOD(New); - - {%each fields as field%} - {%if not field.ignore%} - {%if not field.isEnum %} - {%if field.isLibgitType %} - Persistent {{ field.name }}; - {%elsif field.isCallbackFunction %} - NanCallback* {{ field.name }}; - {%elsif field.payloadFor %} - Persistent {{ field.name }}; - {%endif%} - {%endif%} - static NAN_GETTER(Get{{ field.cppFunctionName }}); - static NAN_SETTER(Set{{ field.cppFunctionName }}); - {%endif%} - {%endeach%} - - {{ cType }} *raw; -}; - -#endif diff --git a/generate/util/file.js b/generate/util/file.js deleted file mode 100644 index c595a7687..000000000 --- a/generate/util/file.js +++ /dev/null @@ -1,24 +0,0 @@ -const fs = require("fs"); -const path = require("path"); - -// Make a locally bound path joiner. -var local = path.join.bind(null, __dirname, "../"); - -exports.read = function(file) { - try { - return fs.readFileSync(local(file)).toString(); - } - catch (unhandledException) { - return ""; - } -}; - -exports.write = function(file, contents) { - try { - fs.writeFileSync(local(file), contents); - return true; - } - catch (unhandledException) { - return false; - } -}; diff --git a/install.js b/install.js index 5123c1fe4..235e1268a 100644 --- a/install.js +++ b/install.js @@ -7,7 +7,8 @@ var Promise = require("nodegit-promise"); var promisify = require("promisify-node"); var request = require("request"); var fse = promisify(require("fs-extra")); -fse.ensureDir = promisify(fse.ensureDir); +var findParentDir = promisify(require('find-parent-dir')); +fse.ensureDir = promisify(fse.ensureDir, function() { return true; }); var exec = promisify(function(command, opts, callback) { return require("child_process").exec(command, opts, callback); @@ -17,6 +18,7 @@ var NODE_VERSION = Number(process.version.match(/^v(\d+\.\d+)/)[1]); // If the build only flag is set. 
var buildOnly = process.env.BUILD_ONLY; +var nodeWebkit = false; // This will take in an object and find any matching keys in the environment // to use as overrides. @@ -46,7 +48,7 @@ function systemPath(parts) { } // Will be used near the end to configure `node-gyp`. -var pythonPath = '/usr/bin/python'; +var pythonPath = ""; var local = path.join.bind(path, __dirname); @@ -56,12 +58,7 @@ var paths = envOverride({ libgit2: local("vendor/libgit2/"), libssh2: local("vendor/libssh2/"), http_parser: local("vendor/http_parser/"), - sys: { - include: local("include/sys/"), - src: local("src/sys/"), - build: local("build/Release/obj.target/src/sys/") - }, - release: local("build/Release/") + release: local("build/Release/"), }); // Load the package.json. @@ -72,112 +69,148 @@ if (NODE_VERSION === 0.1) { } fse.ensureDir(path.resolve(__dirname, paths.release)) +.then(detectNodeWebkit.call(null, __dirname)) .then(fetch) -.then(finish, compile) -.done() +.then(finish, compile); function fetch() { - if (!buildOnly) { - console.info("[nodegit] Fetching binary from S3."); + console.info("[nodegit] Fetching binary from S3."); + + if (nodeWebkit) { + throw new Error("Must build for node-webkit"); + } - // Using the node-pre-gyp module, attempt to fetch a compatible build. - return exec("node-pre-gyp install"); + if (buildOnly) { + throw new Error("BUILD_ONLY is set to true, no fetching allowed."); } - throw new Error("BUILD_ONLY is set to true, no fetching allowed."); + // Using the node-pre-gyp module, attempt to fetch a compatible build. 
+ return exec("node-pre-gyp install"); } -function compile() { - if (!buildOnly) { +function compile(err) { + if (buildOnly || nodeWebkit) { + console.info("[nodegit] " + err.message); + } + else { console.info("[nodegit] Failed to install prebuilt, attempting compile."); } console.info("[nodegit] Determining dependencies."); - return python() - .then(getVendorLib("libgit2", "https://github.com/libgit2/libgit2/tarball/" + pkg.libgit2.sha)) - .then(getVendorLib("libssh2", pkg.libssh2.url)) - .then(getVendorLib("http_parser", pkg.http_parser.url)) - .then(buildNative) - .then(finish, fail); - + return Promise.all([ + python(), + getVendorLib("libgit2", "https://github.com/libgit2/libgit2/tarball/" + pkg.libgit2.sha), + getVendorLib("libssh2", pkg.libssh2.url), + getVendorLib("http_parser", pkg.http_parser.url), + guardGenerated() + ]) + .then(buildNative) + .then(finish, fail); } function python() { - return exec("which python2") + var pathFinderCommand = process.platform === "win32" ? "where" : "which"; + + return exec(pathFinderCommand + " python2") .then(function(which){ return which; }, function(err) { return null; }) .then(function(path) { - return path || exec("which python"); + return path || exec(pathFinderCommand + " python"); }) - .then(function(which) { - return which; + .then(function(path) { + return path; }, function(err) { return null; }) .then(function(path) { - pythonPath = path.trim(); - if (!pythonPath) { + if (!path) { throw new Error("Python is required to build libgit2."); } + return path.trim(); + }, function(err) { + throw new Error("Error finding python."); }) - .then(function() { - return exec(pythonPath + " -V 2>&1"); + .then(function(path) { + pythonPath = path; + return exec(path + " -V 2>&1"); }) .then(function(version) { - if (version[1].indexOf("Python 3") === 0) { + if (version.trim().indexOf("Python 3") === 0) { throw new Error("Incorrect version of Python, gyp requires < 3."); } }); } function getVendorLib(name, url) { - return 
function() { - var version = pkg[name].sha || pkg[name].version; - console.info("[nodegit] Detecting vendor/" + name + "."); - if (fse.existsSync(paths[name] + version)) { - console.info("[nodegit] vendor/" + name + " already exists."); - return new Promise(function(resolve, reject) {resolve() }); - } - else { - console.info("[nodegit] Removing outdated vendor/" + name + "."); - return fse.remove(paths[name]) - .then(function() { - return new Promise(function (resolve, reject) { - - console.info("[nodegit] Fetching vendor/" + name + "."); + var version = pkg[name].sha || pkg[name].version; + console.info("[nodegit] Detecting vendor/" + name + "."); + if (fse.existsSync(paths[name] + version)) { + console.info("[nodegit] vendor/" + name + " already exists."); + return Promise.resolve(); + } + else { + console.info("[nodegit] Removing outdated vendor/" + name + "."); + return fse.remove(paths[name]) + .then(function() { + return new Promise(function (resolve, reject) { - var extract = tar.Extract({ - path: paths[name], - strip: true - }); + console.info("[nodegit] Fetching vendor/" + name + "."); - request.get(url).pipe(zlib.createUnzip()).pipe(extract) - .on("error", reject) - .on("end", resolve); + var extract = tar.Extract({ + path: paths[name], + strip: true }); - }).then(function() { - return fse.writeFile(paths[name] + version, ""); - }).then(function() { - if ((name == "libssh2") && (process.platform !== "win32")) { - return exec(paths[name] + "configure", {cwd: paths[name]}); - } + request.get(url).pipe(zlib.createUnzip()).pipe(extract) + .on("error", reject) + .on("end", resolve); }); - } + }).then(function() { + return fse.writeFile(paths[name] + version, ""); + }).then(function() { + if ((name == "libssh2") && (process.platform !== "win32")) { + return exec(paths[name] + "configure", {cwd: paths[name]}); + } + }); } } +function guardGenerated() { + return Promise.all([ + fse.stat(path.resolve(__dirname, "src/")), + fse.stat(path.resolve(__dirname, 
"include/")) + ]).then(function() { + return Promise.resolve(); + }, function() { + console.info("[nodegit] C++ files not found, generating now."); + console.info("[nodegit] Installing all devDependencies"); + return exec("npm install --ignore-scripts --dont-prepublish") + .then(function() { + return exec("node generate"); + }); + }); +} + function buildNative() { return exec("cd " + __dirname).then(function() { - console.info("[nodegit] Building native node module."); - var pythonFlag = " --python \"" + pythonPath + "\""; - var cmd = path.resolve(systemPath([ - ".", "node_modules", ".bin", "node-gyp clean configure build" + pythonFlag - ])); + if (nodeWebkit) { + console.info("[nodegit] Building native node-webkit module."); + } + else { + console.info("[nodegit] Building native node module."); + } + + var builder = nodeWebkit ? "nw-gyp" : "node-gyp"; + + var cmd = path.resolve(".", "node_modules", ".bin", builder) + + " clean configure " + + (nodeWebkit ? "--target=\"" + nodeWebkit + "\"": "") + + " build --python \"" + pythonPath + "\"" + var opts = { cwd: __dirname, maxBuffer: Number.MAX_VALUE @@ -186,14 +219,42 @@ function buildNative() { }) } +function detectNodeWebkit(directory) { + if (directory) { + var pkg = require(path.resolve(directory, "package.json")); + + nodeWebkit = pkg.engines && pkg.engines["node-webkit"]; + + return findParentDir(path.resolve(directory, ".."), "package.json") + .then(detectNodeWebkit); + } + else { + return Promise.resolve(); + } +} + function finish() { console.info("[nodegit] Completed installation successfully."); - return Promise.resolve().done(); + if (!buildOnly) { + console.info("[nodegit] Cleaning up"); + return Promise.all([ + fse.remove(path.resolve(__dirname, "src")), + fse.remove(path.resolve(__dirname, "include")), + fse.remove(path.resolve(__dirname, "generate/output")), + fse.remove(path.resolve(__dirname, paths.libgit2)), + fse.remove(path.resolve(__dirname, paths.libssh2)), + fse.remove(path.resolve(__dirname, 
paths.http_parser)) + // exec("npm prune --production") + ]).done(); + } + else { + return Promise.resolve().done(); + } } function fail(message) { console.info("[nodegit] Failed to build and install nodegit."); console.info(message.message); - //console.info(message.stack); + return Promise.resolve().done(); } diff --git a/lib/blob.js b/lib/blob.js index 4f8527273..842f2e114 100644 --- a/lib/blob.js +++ b/lib/blob.js @@ -1,8 +1,18 @@ var NodeGit = require("../"); var TreeEntry = require("./tree_entry"); +var LookupWrapper = require("./util/lookupWrapper"); var Blob = NodeGit.Blob; +/** +* Retrieves the blob pointed to by the oid +* @param {Repository} repo The repo that the blob lives in +* @param {String|Oid|Blob} id The blob to lookup +* @param {Function} callback +* @return {Blob} +*/ +Blob.lookup = LookupWrapper(Blob); + /** * Retrieve the content of the Blob. * diff --git a/lib/commit.js b/lib/commit.js index 55cebfb45..9a8a9a4fe 100644 --- a/lib/commit.js +++ b/lib/commit.js @@ -1,9 +1,19 @@ var events = require("events"); var Promise = require("nodegit-promise"); var NodeGit = require("../"); +var LookupWrapper = require("./util/lookupWrapper"); var Commit = NodeGit.Commit; +/** + * Retrieves the commit pointed to by the oid + * @param {Repository} repo The repo that the commit lives in + * @param {String|Oid|Commit} id The commit to lookup + * @param {Function} callback + * @return {Commit} + */ +Commit.lookup = LookupWrapper(Commit); + /** * Retrieve the SHA. 
* @return {String} diff --git a/lib/merge.js b/lib/merge.js index cd96c88ce..fb9a8b0b5 100644 --- a/lib/merge.js +++ b/lib/merge.js @@ -1,5 +1,6 @@ var NodeGit = require("../"); var normalizeOptions = require("./util/normalize_options"); +var Promise = require("nodegit-promise"); var Merge = NodeGit.Merge; var mergeCommits = Merge.commits; @@ -16,7 +17,12 @@ var mergeCommits = Merge.commits; Merge.commits = function(repo, ourCommit, theirCommit, options) { options = normalizeOptions(options, NodeGit.MergeOptions); - return mergeCommits.call(this, repo, ourCommit, theirCommit, options); + return Promise.all([ + repo.getCommit(ourCommit), + repo.getCommit(theirCommit) + ]).then(function(commits) { + return mergeCommits.call(this, repo, commits[0], commits[1], options); + }); }; module.exports = Merge; diff --git a/lib/odb.js b/lib/odb.js index 12e7c82c1..72d26af52 100644 --- a/lib/odb.js +++ b/lib/odb.js @@ -1,12 +1,9 @@ var git = require("../"); -var normalizeOid = require("./util/normalize_oid"); var Odb = git.Odb; var read = Odb.prototype.read; Odb.prototype.read = function(oid, callback) { - oid = normalizeOid(oid); - return read.call(this, oid).then(function(odbObject) { if (typeof callback === "function") { callback(null, odbObject); diff --git a/lib/reference.js b/lib/reference.js index eab158eb0..c772239fd 100644 --- a/lib/reference.js +++ b/lib/reference.js @@ -1,8 +1,27 @@ var NodeGit = require("../"); +var LookupWrapper = require("./util/lookupWrapper"); var Reference = NodeGit.Reference; var Branch = NodeGit.Branch; +/** +* Retrieves the reference pointed to by the oid +* @param {Repository} repo The repo that the reference lives in +* @param {String|Reference} id The reference to lookup +* @param {Function} callback +* @return {Reference} +*/ +Reference.lookup = LookupWrapper(Reference); + +/** +* Retrieves the reference by it's short name +* @param {Repository} repo The repo that the reference lives in +* @param {String|Reference} id The reference to 
lookup +* @param {Function} callback +* @return {Reference} +*/ +Reference.dwim = LookupWrapper(Reference, Reference.dwim); + /** * Returns true if this reference is valid * @return {Boolean} diff --git a/lib/repository.js b/lib/repository.js index f42314724..60a8d6561 100644 --- a/lib/repository.js +++ b/lib/repository.js @@ -1,5 +1,4 @@ var NodeGit = require("../"); -var normalizeOid = require("./util/normalize_oid"); var Blob = require("./blob"); var Tree = require("./tree"); var Tag = require("./tag"); @@ -8,6 +7,7 @@ var Revwalk = require("./revwalk"); var Commit = require("./commit"); var Remote = require("./remote"); var Promise = require("nodegit-promise"); +var normalizeOptions = require("./util/normalize_options"); var TreeBuilder = NodeGit.Treebuilder; var Repository = NodeGit.Repository; @@ -54,35 +54,17 @@ function(name, commit, force, signature, logMessage) { }; /** - * Look up a branch + * Look up a refs's commit. * - * @param {String} name Branch name, e.g. "master" - * @param {Function} callback - * @return {Ref} - */ -Repository.prototype.getBranch = function(name, callback) { - name = ~name.indexOf("refs/heads/") ? name : "refs/heads/" + name; - - return this.getReference(name).then(function(reference) { - if (typeof callback === "function") { - callback(null, reference); - } - - return reference; - }, callback); -}; - -/** - * Look up a branch's most recent commit. - * - * @param {String} name Branch name, e.g. "master" + * @param {String|Ref} name Ref name, e.g. 
"master", "refs/heads/master" + * or Branch Ref * @param {Function} callback * @return {Commit} */ -Repository.prototype.getBranchCommit = function(name, callback) { +Repository.prototype.getReferenceCommit = function(name, callback) { var repository = this; - return this.getBranch(name).then(function(reference) { + return this.getReference(name).then(function(reference) { return repository.getCommit(reference.target()).then(function(commit) { if (typeof callback === "function") { callback(null, commit); @@ -94,34 +76,52 @@ Repository.prototype.getBranchCommit = function(name, callback) { }; /** - * Lists out the remotes in the given repository. - * - * @param {Function} Optional callback - * @return {Object} Promise object. - */ -Repository.prototype.getRemotes = function(callback) { - return Remote.list(this).then(function(remotes) { - if (typeof callback === "function") { - callback(null, remotes); - } +* Look up a branch. Alias for `getReference` +* +* @param {String|Ref} name Ref name, e.g. "master", "refs/heads/master" +* or Branch Ref +* @param {Function} callback +* @return {Ref} +*/ +Repository.prototype.getBranch = function(name, callback) { + return this.getReference(name, callback); +}; - return remotes; - }, callback); +/** +* Look up a branch's most recent commit. Alias to `getReferenceCommit` +* +* @param {String|Ref} name Ref name, e.g. "master", "refs/heads/master" +* or Branch Ref +* @param {Function} callback +* @return {Commit} +*/ +Repository.prototype.getBranchCommit = function(name, callback) { + return this.getReferenceCommit(name, callback); +}; + +/** + * Gets the branch that HEAD currently points to + * Is an alias to head() + * @return {Reference} + */ +Repository.prototype.getCurrentBranch = function() { + return this.head(); }; /** * Lookup the reference with the given name. * - * @param {String} name + * @param {String|Ref} name Ref name, e.g. 
"master", "refs/heads/master" + * or Branch Ref * @param {Function} callback * @return {Reference} */ Repository.prototype.getReference = function(name, callback) { var repository = this; - return Reference.lookup(this, name).then(function(reference) { + return Reference.dwim(this, name).then(function(reference) { if (reference.isSymbolic()) { - return reference.resolve(function (error, reference) { + return reference.resolve().then(function(reference) { reference.repo = repository; if (typeof callback === "function") { @@ -129,7 +129,7 @@ Repository.prototype.getReference = function(name, callback) { } return reference; - }); + }, callback); } else { reference.repo = repository; if (typeof callback === "function") { @@ -194,8 +194,6 @@ Repository.prototype.getReferenceNames = function(type, callback) { * @return {Commit} */ Repository.prototype.getCommit = function(oid, callback) { - oid = normalizeOid(oid); - var repository = this; return Commit.lookup(repository, oid).then(function(commit) { @@ -217,7 +215,6 @@ Repository.prototype.getCommit = function(oid, callback) { * @return {Blob} */ Repository.prototype.getBlob = function(oid, callback) { - oid = normalizeOid(oid); var repository = this; return Blob.lookup(repository, oid).then(function(blob) { @@ -239,8 +236,6 @@ Repository.prototype.getBlob = function(oid, callback) { * @return {Tree} */ Repository.prototype.getTree = function(oid, callback) { - oid = normalizeOid(oid); - var repository = this; return Tree.lookup(repository, oid).then(function(tree) { @@ -262,8 +257,6 @@ Repository.prototype.getTree = function(oid, callback) { * @return {Tag} */ Repository.prototype.getTag = function(oid, callback) { - oid = normalizeOid(oid); - var repository = this; return Tag.lookup(repository, oid).then(function(reference) { @@ -326,6 +319,20 @@ Repository.prototype.getMasterCommit = function(callback) { return this.getBranchCommit("master", callback); }; +/** + * Retrieve the commit that HEAD is currently pointing 
to + * + * @param {Function} callback + * @return {Commit} + */ +Repository.prototype.getHeadCommit = function(callback) { + var repo = this; + + return Reference.nameToId(repo, "HEAD").then(function(head) { + return repo.getCommit(head, callback); + }); +}; + /** * Create a commit * @@ -341,40 +348,40 @@ Repository.prototype.getMasterCommit = function(callback) { Repository.prototype.createCommit = function( updateRef, author, committer, message, tree, parents, callback) { - var createCommit = null; var repo = this; + var promises = []; - if (tree instanceof Tree) { - createCommit = Promise.all([ - Commit.create( - repo, - updateRef, - author, - committer, - null /* use default message encoding */, - message, - tree, - parents.length, - parents - ) - ]); - } else { - createCommit = this.getTree(tree).then(function(tree) { - return Commit.create( - repo, - updateRef, - author, - committer, - null /* use default message encoding */, - message, - tree, - parents.length, - parents - ); - }); - } + parents = parents || []; + + promises.push(repo.getTree(tree)); + + parents.forEach(function(parent) { + promises.push(repo.getCommit(parent)); + }); + + return Promise.all(promises).then(function(results) { + tree = results[0]; - return createCommit.then(function(commit) { + // Get the normalized values for our input into the function + var parentsLength = parents.length; + parents = []; + + for (var i = 0; i < parentsLength; i++) { + parents.push(results[i + 1]); + } + + return Commit.create( + repo, + updateRef, + author, + committer, + null /* use default message encoding */, + message, + tree, + parents.length, + parents + ); + }).then(function(commit) { if (typeof callback === "function") { callback(null, commit); } @@ -383,6 +390,48 @@ Repository.prototype.createCommit = function( }, callback); }; +/** + * Creates a new commit on HEAD from the list of passed in files + * @param {Array} filesToAdd + * @param {Signature} author + * @param {Signature} committer + * 
@param {String} message + * @param {Function} callback + * @return {Oid} The oid of the new commit + */ +Repository.prototype.createCommitOnHead = function( + filesToAdd, + author, + committer, + message, + callback){ + var repo = this; + + return repo.openIndex().then(function(index) { + index.read(true); + + filesToAdd.forEach(function(filePath) { + index.addByPath(filePath); + }); + + index.write(); + + return index.writeTree(); + }).then(function(treeOid) { + + return repo.getHeadCommit().then(function(parent) { + return repo.createCommit( + "HEAD", + author, + committer, + message, + treeOid, + [parent], + callback); + }); + }, callback); +}; + /** * Create a blob from a buffer * @@ -408,4 +457,196 @@ Repository.prototype.treeBuilder = function() { return builder; }; +/** + * Gets the default signature for the default user and now timestamp + * @return {Signature} + */ +Repository.prototype.defaultSignature = function() { + return NodeGit.Signature.default(this); +}; + +/** +* Lists out the remotes in the given repository. +* +* @param {Function} Optional callback +* @return {Object} Promise object. 
+*/ +Repository.prototype.getRemotes = function(callback) { + return Remote.list(this).then(function(remotes) { + if (typeof callback === "function") { + callback(null, remotes); + } + + return remotes; + }, callback); +}; + +/** + * Gets a remote from the repo + * + * @param {String|Remote} remote + * @param {Function} callback + * @return {Remote} The remote object + */ +Repository.prototype.getRemote = function(remote, callback) { + if (remote instanceof NodeGit.Remote) { + return Promise.resolve(remote).then(function(remoteObj) { + if (typeof callback === "function") { + callback(null, remoteObj); + } + + return remoteObj; + }, callback); + } + + return NodeGit.Remote.load(this, remote).then(function(remoteObj) { + if (typeof callback === "function") { + callback(null, remoteObj); + } + + return remoteObj; + }, callback); +}; + +/** + * Fetches from a remote + * + * @param {String|Remote} remote + */ +Repository.prototype.fetch = function( + remote, + remoteCallbacks, + ignoreCertErrors, + callback) +{ + var repo = this; + + return repo.getRemote(remote).then(function(remote) { + remote.setCallbacks(remoteCallbacks); + remote.checkCert(ignoreCertErrors ? 
0 : 1); + + return remote.fetch(repo.defaultSignature(), "Fetch from " + remote) + .then(function() { + if (typeof callback === "function") { + callback(); + } + }); + }, callback); +}; + +/** + * Fetches from all remotes + */ +Repository.prototype.fetchAll = function( + remoteCallbacks, + ignoreCertErrors, + callback) +{ + var repo = this; + + return repo.getRemotes().then(function(remotes) { + var fetchPromises = []; + + remotes.forEach(function(remote) { + fetchPromises.push( + repo.fetch(remote, remoteCallbacks, ignoreCertErrors, callback)); + }); + + return Promise.all(fetchPromises); + }, callback); +}; + +/** + * Merge a branch onto another branch + * + * @param {String|Ref} from + * @param {String|Ref} to + * @return {Oid|Index} A commit id for a succesful merge or an index for a + * merge with conflicts + */ +Repository.prototype.mergeBranches = function(to, from, signature) { + var repo = this; + var fromBranch; + var toBranch; + + signature = signature || repo.defaultSignature(); + + return Promise.all([ + repo.getBranch(to), + repo.getBranch(from) + ]).then(function(branches) { + toBranch = branches[0]; + fromBranch = branches[1]; + + return Promise.all([ + repo.getBranchCommit(toBranch), + repo.getBranchCommit(fromBranch) + ]); + }).then(function(branchCommits) { + var toCommitOid = branchCommits[0].toString(); + var fromCommitOid = branchCommits[1].toString(); + + return NodeGit.Merge.base(repo, toCommitOid, fromCommitOid) + .then(function(baseCommit) { + if (baseCommit.toString() == fromCommitOid) { + // The commit we're merging to is already in our history. 
+ // nothing to do so just return the commit the branch is on + return toCommitOid; + } + else if (baseCommit.toString() == toCommitOid) { + // fast forward + var message = + "Fast forward branch " + + toBranch.shorthand() + + " to branch " + + fromBranch.shorthand(); + + return toBranch.setTarget( + fromCommitOid, + signature, + message) + .then(function() { + return fromCommitOid; + }); + } + else { + // We have to merge. Lets do it! + return NodeGit.Merge.commits(repo, toCommitOid, fromCommitOid) + .then(function(index) { + // if we have conflicts then throw the index + if (index.hasConflicts()) { + throw index; + } + + // No conflicts so just go ahead with the merge + index.write(); + return index.writeTreeTo(repo); + }).then(function(oid) { + var message = + "Merged " + + fromBranch.shorthand() + + " into " + + toBranch.shorthand(); + + return repo.createCommit( + toBranch.name(), + signature, + signature, + message, + oid, + [toCommitOid, fromCommitOid]); + }); + } + }); + }); +}; + +// Override Repository.initExt to normalize initoptions +var initExt = Repository.initExt; +Repository.initExt = function(repo_path, opts) { + opts = normalizeOptions(opts, NodeGit.RepositoryInitOptions); + return initExt(repo_path, opts); +}; + + module.exports = Repository; diff --git a/lib/revwalk.js b/lib/revwalk.js index cf4875bc4..37e24d9f6 100644 --- a/lib/revwalk.js +++ b/lib/revwalk.js @@ -1,6 +1,4 @@ var NodeGit = require("../"); -var normalizeOid = require("./util/normalize_oid"); - var Revwalk = NodeGit.Revwalk; var oldSorting = Revwalk.prototype.sorting; @@ -30,8 +28,6 @@ Revwalk.prototype.sorting = function() { * @return {Commit} */ Revwalk.prototype.walk = function(oid, callback) { - oid = normalizeOid(oid); - var revwalk = this; this.push(oid); diff --git a/lib/tag.js b/lib/tag.js index ebbe71af6..2a6c51f90 100644 --- a/lib/tag.js +++ b/lib/tag.js @@ -1,5 +1,15 @@ var git = require("../"); +var LookupWrapper = require("./util/lookupWrapper"); var Tag = git.Tag; 
+/** +* Retrieves the tag pointed to by the oid +* @param {Repository} repo The repo that the tag lives in +* @param {String|Oid|Tag} id The tag to lookup +* @param {Function} callback +* @return {Tag} +*/ +Tag.lookup = LookupWrapper(Tag); + module.exports = Tag; diff --git a/lib/tree.js b/lib/tree.js index 9abb4662d..ca35af90a 100644 --- a/lib/tree.js +++ b/lib/tree.js @@ -3,6 +3,16 @@ var Tree = git.Tree; var Treebuilder = git.Treebuilder; var Diff = git.Diff; var events = require("events"); +var LookupWrapper = require("./util/lookupWrapper"); + +/** +* Retrieves the tree pointed to by the oid +* @param {Repository} repo The repo that the tree lives in +* @param {String|Oid|Tree} id The tree to lookup +* @param {Function} callback +* @return {Tree} +*/ +Tree.lookup = LookupWrapper(Tree); /** * Diff two trees diff --git a/lib/util/lookupWrapper.js b/lib/util/lookupWrapper.js new file mode 100644 index 000000000..a0c66ffec --- /dev/null +++ b/lib/util/lookupWrapper.js @@ -0,0 +1,37 @@ +var Promise = require("nodegit-promise"); + +/** +* Wraps a method so that you can pass in either a string, OID or the object +* itself and you will always get back a promise that resolves to the object. +* @param {Object} objectType The object type that you're expecting to receive. +* @param {Function} lookupFunction The function to do the lookup for the +* object. Defaults to `objectType.lookup`. 
+* @return {Function} +*/ +module.exports = function(objectType, lookupFunction) { + lookupFunction = lookupFunction || objectType.lookup; + + return function(repo, id, callback) { + if (id instanceof objectType) { + return Promise.resolve(id).then(function(obj) { + obj.repo = repo; + + if (typeof callback === "function") { + callback(null, obj); + } + + return obj; + }, callback); + } + + return lookupFunction(repo, id).then(function(obj) { + obj.repo = repo; + + if (typeof callback === "function") { + callback(null, obj); + } + + return obj; + }, callback); + }; +}; diff --git a/lib/util/normalize_oid.js b/lib/util/normalize_oid.js deleted file mode 100644 index f08efcc07..000000000 --- a/lib/util/normalize_oid.js +++ /dev/null @@ -1,18 +0,0 @@ -var NodeGit = require("../../"); - -/** - * Normalize an identifier to always be an OID instance. - * - * @param {String, Object} oid - The oid string or instance. - * @return {Object} An Oid instance. - */ -function normalizeOid(oid) { - try { - return typeof oid === "string" ? 
NodeGit.Oid.fromString(oid) : oid; - } - catch (ex) { - return null; - } -} - -module.exports = normalizeOid; diff --git a/package.json b/package.json index 9e0ec2a13..427958e7d 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "nodegit", "description": "Node.js libgit2 asynchronous native bindings", - "version": "0.2.3", + "version": "0.2.4", "libgit2": { "sha": "4af08d9f69f151f6362df51d7d7f41527e2af05c", "version": "0.21.2" @@ -60,20 +60,25 @@ "node-pre-gyp" ], "dependencies": { - "combyne": "~0.6.2", + "find-parent-dir": "^0.3.0", "fs-extra": "^0.12.0", - "lodash": "^2.4.1", - "istanbul": "~0.3.2", - "jshint": "~2.5.6", - "mocha": "~1.21.4", "nan": "~1.3.0", "node-gyp": "~1.0.2", "node-pre-gyp": "~0.5.27", "nodegit-promise": "~1.0.0", + "nw-gyp": "^0.12.4", "promisify-node": "~0.1.2", "request": "~2.45.0", "tar": "~1.0.1" }, + "devDependencies": { + "mocha": "~1.21.4", + "combyne": "~0.6.2", + "istanbul": "~0.3.2", + "js-beautify": "^1.5.4", + "jshint": "~2.5.6", + "lodash": "^2.4.1" + }, "binary": { "module_name": "nodegit", "module_path": "./build/Release/", @@ -83,12 +88,15 @@ "lint": "jshint lib test/tests", "cov": "node test", "mocha": "mocha test/runner test/tests", + "mochaDebug": "mocha --debug-brk test/runner test/tests", "test": "npm run lint && npm run cov", - "missing-tests": "node generate/missing-tests", + "generateJson": "node generate/scripts/generateJson", + "generateNativeCode": "node generate/scripts/generateNativeCode", + "generateMissingTests": "node generate/scripts/generateMissingTests", + "prepublish": "node prepublish", "publish": "node-pre-gyp package && node-pre-gyp publish", - "generate": "node generate/setup && node generate", - "install": "npm run generate && node install", + "install": "node install", "recompile": "BUILD_ONLY=true npm install", - "rebuild": "BUILD_ONLY=true npm run generate && node-gyp configure build" + "rebuild": "BUILD_ONLY=true node generate && node-gyp configure build" } } diff --git 
a/prepublish.js b/prepublish.js new file mode 100644 index 000000000..7fddbecdd --- /dev/null +++ b/prepublish.js @@ -0,0 +1,8 @@ +var exec = require('child_process').exec; +try { + require("./build/Release/nodegit"); + console.info("[nodegit] Nothing to do.") +} +catch (e) { + exec("node generate"); +} diff --git a/test/index.js b/test/index.js index 288e7a720..dab2fcdaa 100644 --- a/test/index.js +++ b/test/index.js @@ -10,4 +10,6 @@ var args = [ require("child_process").fork("../node_modules/istanbul/lib/cli.js", args, { cwd: __dirname +}).on("close", function(code) { + process.exit(code); }); diff --git a/test/tests/blob.js b/test/tests/blob.js index 06856cc03..c3d8f61ff 100644 --- a/test/tests/blob.js +++ b/test/tests/blob.js @@ -5,6 +5,7 @@ describe("Blob", function() { var reposPath = path.resolve("test/repos/workdir/.git"); var oid = "111dd657329797f6165f52f5085f61ac976dcf04"; + var Oid = require("../../lib/oid"); var Repository = require("../../lib/repository"); var FileMode = require("../../lib/tree_entry").FILEMODE; @@ -36,4 +37,11 @@ describe("Blob", function() { it("can determine if a blob is not a binary", function() { assert.equal(this.blob.filemode(), FileMode.BLOB); }); + + it("can get a blob with an Oid object", function() { + var oidObject = Oid.fromString(oid); + this.repository.getBlob(oidObject).then(function(blob) { + assert.equal(this.blob.id().toString(), oid); + }); + }); }); diff --git a/test/tests/merge.js b/test/tests/merge.js index 72b3d9c74..90d666c59 100644 --- a/test/tests/merge.js +++ b/test/tests/merge.js @@ -129,6 +129,233 @@ describe("Merge", function() { }); }); + it("can fast-forward using the convenience method", function() { + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = nodegit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = nodegit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var theirCommit; + var ourBranch; + var theirBranch; + + return fse.writeFile( + path.join(repository.workdir(), ourFileName), + ourFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return repository.openIndex() + .then(function(index) { + index.read(1); + index.addByPath(ourFileName); + index.write(); + + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "11ead82b1135b8e240fb5d61e703312fb9cc3d6a"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "we made a commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "91a183f87842ebb7a9b08dad8bc2473985796844"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return repository.openIndex() + .then(function(index) { + index.read(1); + index.addByPath(theirFileName); + index.write(); + + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "76631cb5a290dafe2959152626bb90f2a6d8ec94"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + 
assert.equal(commitOid.toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); + }) + .then(function() { + return repository.mergeBranches( + ourBranchName, + theirBranchName, + ourSignature); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + + return repository.getBranchCommit(ourBranchName) + .then(function(branchCommit) { + assert.equal(oid.toString(), branchCommit.toString()); + }); + }); + }); + + it("can merge cleanly using the convenience method", function() { + var initialFileName = "initialFile.txt"; + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var initialFileContent = "I'd like to drive somewhere"; + var ourFileContent = "I like Toll Roads. I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = nodegit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = nodegit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var initialCommit; + var ourCommit; + var theirCommit; + var ourBranch; + var theirBranch; + + return fse.writeFile( + path.join(repository.workdir(), initialFileName), + initialFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return repository.openIndex() + .then(function(index) { + index.read(1); + index.addByPath(initialFileName); + index.write(); + + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "21a553813e2f670815b649eef51eeadb253a5d0c"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "af66a9c799a10a23319ee4318c8bb2021521f539"); + + return 
repository.getCommit(commitOid).then(function(commit) { + initialCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), ourFileName), + ourFileContent); + }) + .then(function() { + return repository.openIndex() + .then(function(index) { + index.read(1); + index.addByPath(ourFileName); + index.write(); + + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "af60aa06b3537f75b427f6268a130c842c84a137"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "7ce31c05427659986d50abfb90c8f7db88ef4fa1"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }); + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return repository.openIndex() + .then(function(index) { + index.read(1); + index.addByPath(theirFileName); + index.write(); + + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "f007361737a2ca00a0e80fc2daf55064463173b4"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "b588f0eef1809226f8f7db542940749da15ae1de"); + + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); + }) + .then(function() { + return repository.mergeBranches(ourBranchName, theirBranchName, + ourSignature); + }) + .then(function(commitId) { + 
assert.equal(commitId.toString(), + "5384feb481d9c29081b3a0c1478fcc24a3953efa"); + }); + }); + it("can merge 2 branchs with conflicts on a single file", function () { var baseFileContent = "All Bobs are created equal. ish.\n"; var ourFileContent = "Big Bobs are best, IMHO.\n"; diff --git a/test/tests/remote.js b/test/tests/remote.js index 1142fd192..af2354491 100644 --- a/test/tests/remote.js +++ b/test/tests/remote.js @@ -88,7 +88,7 @@ describe("Remote", function() { }); }); - it("can fetch from a remote", function() { + it("can download from a remote", function() { var repo = this.repository; return Remote.load(repo, "origin") @@ -102,4 +102,24 @@ describe("Remote", function() { assert(false); }); }); + + it("can fetch from a remote", function() { + return this.repository.fetch("origin", { + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyFromAgent(userName); + } + }, true); + }); + + it("can fetch from all remotes", function() { + // Set a reasonable timeout here for the fetchAll test + this.timeout(15000); + + return this.repository.fetchAll({ + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyFromAgent(userName); + } + }, true); + }); + }); diff --git a/test/tests/repository.js b/test/tests/repository.js index 3d1de0fe8..1f9b87af5 100644 --- a/test/tests/repository.js +++ b/test/tests/repository.js @@ -1,5 +1,7 @@ var assert = require("assert"); var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); describe("Repository", function() { var reposPath = path.resolve("test/repos/workdir/.git"); @@ -7,6 +9,7 @@ describe("Repository", function() { var Repository = require("../../lib/repository"); var Index = require("../../lib/index"); + var Signature = require("../../lib/signature"); before(function() { var test = this; @@ -40,6 +43,14 @@ describe("Repository", function() { }); }); + it("can utilize repository init options", function() { + return 
fse.remove(newRepo).then(function() { + return Repository.initExt(newRepo, { + flags: Repository.INIT_FLAG.MKPATH + }); + }); + }); + it("can read the index", function() { return this.repository.index().then(function(index) { assert.ok(index instanceof Index); @@ -48,8 +59,20 @@ describe("Repository", function() { it("can list remotes", function() { return this.repository.getRemotes().then(function(remotes) { - assert.equal(remotes.count(), 1); - assert.equal(remotes.strings(), "origin"); + assert.equal(remotes.length, 1); + assert.equal(remotes[0], "origin"); }); }); + + it("can get the current branch", function() { + return this.repository.getCurrentBranch().then(function(branch) { + assert.equal(branch.shorthand(), "master"); + }); + }); + + it("can get the default signature", function() { + var sig = this.repository.defaultSignature(); + + assert(sig instanceof Signature); + }); }); diff --git a/test/tests/signature.js b/test/tests/signature.js new file mode 100644 index 000000000..b43f36f06 --- /dev/null +++ b/test/tests/signature.js @@ -0,0 +1,35 @@ +var assert = require("assert"); + +describe("Signature", function() { + + var Signature = require("../../lib/signature"); + var name = "Bob Gnarley"; + var email = "gnarlee@bob.net"; + var arbitraryDate = 123456789; + var timezoneOffset = 60; + + it("can be created at an arbitrary time", function() { + var create = Signature.create; + var signature = create(name, email, arbitraryDate, timezoneOffset); + + assert.equal(signature.name(), name); + assert.equal(signature.email(), email); + assert.equal(signature.when().time(), arbitraryDate); + assert.equal(signature.when().offset(), 60); + + }); + + it("can be created now", function() { + + var signature = Signature.now(name, email); + var now = new Date(); + var when = signature.when(); + var diff = Math.abs(when.time() - now/1000); + assert.equal(signature.name(), name); + assert.equal(signature.email(), email); + assert(diff <= 1); + + // libgit2 does its 
timezone offsets backwards from javascript + assert.equal(when.offset(), -now.getTimezoneOffset()); + }); +}); diff --git a/test/tests/tag.js b/test/tests/tag.js index 7a8ba14d5..2a93cc1bb 100644 --- a/test/tests/tag.js +++ b/test/tests/tag.js @@ -5,6 +5,7 @@ describe("Tag", function() { var reposPath = path.resolve("test/repos/workdir/.git"); var Repository = require("../../lib/repository"); + var Tag = require("../../lib/tag"); var Obj = require("../../lib/object"); var Oid = require("../../lib/oid"); @@ -60,4 +61,14 @@ describe("Tag", function() { testTag(tag); }); }); + + it("can list tags in a repo", function() { + return Tag.list(this.repo).then(function(tagNames) { + tagNames = tagNames.filter(function(tagNameTest) { + return tagNameTest == tagName; + }); + + assert.equal(tagNames.length, 1); + }); + }); });