diff --git a/.astylerc b/.astylerc new file mode 100644 index 000000000..ef70741d4 --- /dev/null +++ b/.astylerc @@ -0,0 +1,6 @@ +--style=linux +--indent=spaces=2 +--attach-namespaces +--attach-classes +--attach-inlines +--attach-extern-c diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..5e17dfa98 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,11 @@ +root = true + +[*] +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 + +[Makefile] +indent_style = tab +indent_size = 4 diff --git a/.gitignore b/.gitignore index be86e88da..5529ab114 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,27 @@ -/build/* -!/build/codegen/ - -/doc/* -!/doc/Theme.css - +/build/ +/coverage/ +/dist/ +/include/ +/lib/enums.js +/lib/nodegit.js /node_modules/ -/vendor/libgit2/ +/src/ +/test/coverage/ +/test/home/ /test/repos/ +/test/test/ + +/generate/output +/generate/**/*.json +!/generate/input/*.json + +/generate/missing-tests.json +/binding.gyp + + +*.log +.DS_Store +.idea + +.vscode +jsconfig.json diff --git a/.gitmodules b/.gitmodules index ca6a48f0b..1e39a72bf 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "vendor/libgit2"] path = vendor/libgit2 - url = git://github.com/libgit2/libgit2.git + url = https://github.com/nodegit/libgit2.git diff --git a/.jshintrc b/.jshintrc new file mode 100644 index 000000000..0fd02f29b --- /dev/null +++ b/.jshintrc @@ -0,0 +1,31 @@ +{ + "boss": true, + "curly": true, + "eqnull": true, + "esnext": true, + "evil": true, + "futurehostile": true, + "globals": { + "after": true, + "afterEach": true, + "before": true, + "beforeEach": true, + "define": true, + "describe": true, + "global": true, + "it": true + }, + "immed": false, + "maxlen": 80, + "node": true, + "predef": [ + "Promise", + "Set" + ], + "proto": true, + "quotmark": "double", + "trailing": true, + "undef": true, + "unused": "vars", + "validthis": true +} diff --git a/.npmignore
b/.npmignore index 1980fa872..046bdf56a 100644 --- a/.npmignore +++ b/.npmignore @@ -1,6 +1,27 @@ -docs/ -example/ -test/ -util/ -vendor/nodeunit -vendor/rimraf +/.travis/ +/build/ +/examples/ +/generate/ +/guides/ +/lib/ +/test/ +/vendor/Release/ + +!/include +!/src + +.astylerc +.editorconfig +.gitignore +.gitmodules +.jshintrc +.travis.yml +appveyor.yml + +!binding.gyp + +*.filters +*.log +*.md +*.sln +*.vcxproj diff --git a/.travis.yml b/.travis.yml index 0604aaedf..d17cd1fa9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,138 @@ -language: node_js -node_js: - - 0.8 - - "0.10" - - 0.11 -git: - depth: 1 branches: only: - master + - /^v\d+\.\d+\.\d+$/ + +compiler: clang + +os: + - linux + - osx + +sudo: false + +env: + matrix: + - export NODE_VERSION="0.12" TARGET_ARCH="x64" + - export NODE_VERSION="4.5" TARGET_ARCH="x64" + - export NODE_VERSION="5.12" TARGET_ARCH="x64" + - export NODE_VERSION="6.5" TARGET_ARCH="x64" + matrix: fast_finish: true - allow_failures: - - node_js: 0.11 + include: + - os: linux + env: export NODE_VERSION="0.12" TARGET_ARCH="ia32" + sudo: required + - os: linux + env: export NODE_VERSION="4.5" TARGET_ARCH="ia32" + sudo: required + - os: linux + env: export NODE_VERSION="5.12" TARGET_ARCH="ia32" + sudo: required + - os: linux + env: export NODE_VERSION="6.5" TARGET_ARCH="ia32" + sudo: required + +git: + depth: 1 + +addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - build-essential + - libssl-dev + - gcc-4.9-multilib + - g++-4.9-multilib + - lcov + +before_install: + - export CC=clang + - export CXX=clang++ + - export npm_config_clang=1 + + - if [ $TARGET_ARCH == "ia32" ]; then + sudo ln -s /usr/include/asm-generic /usr/include/asm; + fi + + - if [ $TRAVIS_OS_NAME != "linux" ]; then + git clone https://github.com/creationix/nvm.git ./.nvm; + source ./.nvm/nvm.sh; + fi + + - nvm install $NODE_VERSION + + - if [ -z "$TRAVIS_TAG" ] && [ $TRAVIS_OS_NAME == "linux" ] && [ $NODE_VERSION == "0.12" ]; then + export 
GYP_DEFINES="coverage=1 use_obsolete_asm=true"; + export JOBS=4; + export CC=/usr/bin/gcc-4.9; + export CXX=/usr/bin/g++-4.9; + export npm_config_clang=0; + wget http://downloads.sourceforge.net/ltp/lcov-1.10.tar.gz; + tar xvfz lcov-1.10.tar.gz; + else + export GYP_DEFINES="use_obsolete_asm=true"; + export JOBS=4; + fi + +# node 0.12 is sometimes failing +# with a "Callback called more than once" error +# Despite that error, it seems like modules do get installed correctly, +# and the rest of the build proceeds as normal. +# So we ignore that error, just for node 0.12 +# If npm install ever fails in a more significant way, +# the rest of the build should hopefully fail anyway. +install: + - if [[ $NODE_VERSION == "0.12" ]]; then + npm install || true; + else + npm install; + fi + +# This is a random private key used purely for testing. +before_script: + - echo -e "Host *\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config + - echo -e "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDkTcgXnHuqR0gbwegnr9Zxz4hTkjjV/SpgJNPJz7mo/HKNbx0rqjj1P0yGR053R9GSFFim2ut4NK9DPPUkQdyucw+DoLkYRHJmlJ4BNa9NTCD0sl+eSXO2969kZojCYSOgbmkCJx8mdgTwhzdgE/jhBrsY0hPE6pRTlU+H68/zeNdJUAIJf0LLXOm3hpTKLA19VICltl/j9VvBJpgRHdBylXEyL8HokYpjkQQk1ZXj3m7Nlo8yDdg4VcljOJWC+Xh8kxRMfK5x/VRVsYKCQXN5QlzKeqf7USRDUS/7mFoPUBW+d4kwKtGxRsWuIL2yeqzifZUTOgsh9+ZWAWxWffQZ your_email@example.com" > ~/.ssh/id_rsa.pub + - echo -e "-----BEGIN RSA PRIVATE 
KEY-----\nMIIEpAIBAAKCAQEA5E3IF5x7qkdIG8HoJ6/Wcc+IU5I41f0qYCTTyc+5qPxyjW8d\nK6o49T9MhkdOd0fRkhRYptrreDSvQzz1JEHcrnMPg6C5GERyZpSeATWvTUwg9LJf\nnklztvevZGaIwmEjoG5pAicfJnYE8Ic3YBP44Qa7GNITxOqUU5VPh+vP83jXSVAC\nCX9Cy1zpt4aUyiwNfVSApbZf4/VbwSaYER3QcpVxMi/B6JGKY5EEJNWV495uzZaP\nMg3YOFXJYziVgvl4fJMUTHyucf1UVbGCgkFzeUJcynqn+1EkQ1Ev+5haD1AVvneJ\nMCrRsUbFriC9snqs4n2VEzoLIffmVgFsVn30GQIDAQABAoIBAQDPQm2sQbti0mN8\nD4Uawl8D40v30n8WhUa7EbPTOmlqKAQ2sfDhex9KRbTLEmEBmImA/Eee8o9iCTIy\n8Fv8Fm6pUHt9G6Pti/XvemwW3Q3QNpSUkHqN0FDkgecQVqVBEb6uHo3mDm4RFINX\neOmkp30BjIK9/blEw1D0sFALLOEUPaDdPMwiXtFgqfrFSgpDET3TvQIwZ2LxxTm0\ncNmP3sCSlZHJNkZI4hBEWaaXR+V5/+C1qblDCo5blAWTcX3UzqrwUUJgFi6VnBuh\n7S9Q6+CEIU+4JRyWQNmY8YgZFaAp6IOr/kyfPxTP1+UEVVgcLn3WDYwfG9og0tmz\nfzlruAgBAoGBAPfz73Pey86tNZEanhJhbX8gVjzy2hvyhT0paHg0q/H6c1VWOtUH\nOwZ3Ns2xAZqJhlDqCHnQYSCZDly042U/theP4N8zo1APb4Yg4qdmXF9QE1+2M03r\nkS6138gU/CSCLf8pCYa6pA/GmsaXxloeJGLvT4fzOZRsVav80/92XHRhAoGBAOu2\nmKh4Gr1EjgN9QNbk9cQTSFDtlBEqO/0pTepvL73UvNp/BAn4iYZFU4WnklFVBSWc\nL84Sc732xU12TAbTTUsa6E7W29pS8u7zVTxlIdQIIU5pzDyU1pNNk2kpxzte5p3Y\nPDtniPFsoYLWoH0LpsKL93t2pLAj+IOkE6f3XBq5AoGAIKaYo5N1FxQr952frx/x\nQUpK0N/R5Ng8v18SiLG26rhmM5iVSrQXC7TrHI7wfR8a9tC6qP/NqnM9NuwC/bQ0\nEEo7/GhaWxKNRwZRkmWiSFLNGk9t1hbtGU+N1lUdFtmloPIQdRNiw0kN3JTj474Q\nYI7O1EItFORnK6yxZfR6HEECgYEA1CT7MGUoa8APsMRCXyaiq15Pb8bjxK8mXquW\nHLEFXuzhLCW1FORDoj0y9s/iuKC0iS0ROX8R/J7k5NrbgikbH8WP36UxKkYNr1IC\nHOFImPTYRSKjVsL+fIUNb1DSp3S6SsYbL7v3XJJQqtlQiDq8U8x1aQFXJ9C4EoLR\nzhKrKsECgYBtU/TSF/TATZY5XtrN9O+HX1Fbz70Ci8XgvioheVI2fezOcXPRzDcC\nOYPaCMNKA5E8gHdg4s0TN7uDvKTJ+KhSg2V7gZ39A28dHrJaRX7Nz4k6t2uEBjX9\na1JidpAIbJ+3w7+hj6L299tVZvS+Y/6Dz/uuEQGXfJg/l/5CCvQPsA==\n-----END RSA PRIVATE KEY-----" > ~/.ssh/id_rsa + - chmod 600 ~/.ssh/id_rsa* + - eval `ssh-agent -s` + - ssh-add ~/.ssh/id_rsa + - git config --global user.name "John Doe" + - git config --global user.email johndoe@example.com + +script: + - if [ $TARGET_ARCH == "x64" ]; then + if [ -z "$TRAVIS_TAG" ] && [ $TRAVIS_OS_NAME == "linux" ] && [ 
$NODE_VERSION == "0.12" ]; then + npm test && npm run cov && npm run coveralls; + else + npm test; + fi + else + echo "Not running tests because the binary is not built for 64-bit systems"; + fi + +after_success: + - if [ -n "$TRAVIS_TAG" ]; then + npm install -g node-pre-gyp; + npm install -g aws-sdk; + node lifecycleScripts/clean; + node-pre-gyp package --target_arch=$TARGET_ARCH; + node-pre-gyp publish --target_arch=$TARGET_ARCH; + fi + + - if [ $TRAVIS_BRANCH == "master" ] && [ $TRAVIS_PULL_REQUEST == "false" ] && [ $TRAVIS_OS_NAME == "linux" ] && [ $NODE_VERSION == "4.5" ] && [ $TARGET_ARCH == "x64" ]; then + .travis/deploy-docs.sh; + fi + +notifications: + slack: + secure: KglNSqZiid9YudCwkPFDh+sZfW5BwFlM70y67E4peHwwlbbV1sSBPHcs74ZHP/lqgEZ4hMv4N2NI58oYFD5/1a+tKIQP1TkdIMuq4j2LXheuirA2HDcydOVrsC8kRx5XFGKdVRg/uyX2dlRHcOWFhxrS6yc6IxtxYWlRTD2SmEc= + + webhooks: + urls: + - https://webhooks.gitter.im/e/cbafdb27ad32ba746a73 + on_success: always # options: [always|never|change] default: always + on_failure: always # options: [always|never|change] default: always + on_start: false # default: false diff --git a/.travis/deploy-docs.sh b/.travis/deploy-docs.sh new file mode 100755 index 000000000..50ee50b2b --- /dev/null +++ b/.travis/deploy-docs.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash + + +# delete "nodegit.github.com" folder if it exists +rm -rf "nodegit.github.com" || exit 0; + +# clone "nodegit.github.com" repository +git clone "https://github.com/nodegit/nodegit.github.com.git" + +# change into "nodegit.github.com" folder +cd "nodegit.github.com" + +# install node dependencies +npm install + +# link "nodegit" folder +ln -s ../.. generate/nodegit + +# generate new docs +node generate + +# configure git user information +git config user.name "Travis CI" +git config user.email "noreply@travis-ci.org" + +# commit changes +git add .
+git commit -m "Deploy to GitHub Pages + +see https://github.com/nodegit/nodegit.github.com/commit/${TRAVIS_COMMIT}" +git tag "${TRAVIS_COMMIT}" + +# push to the "nodegit.github.com" repository +git push --quiet "https://${GH_TOKEN}@github.com/nodegit/nodegit.github.com.git" master "${TRAVIS_COMMIT}" > /dev/null 2>&1 diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..3e2e82f05 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,1527 @@ +# Change Log + +## v0.15.1 [(2016-06-20)](https://github.com/nodegit/nodegit/releases/tag/v0.15.1) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.15.0...v0.15.1) + +- Fix postinstall breaking build if it fails. + +## v0.14.1 [(2016-06-20)](https://github.com/nodegit/nodegit/releases/tag/v0.14.1) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.14.0...v0.14.1) + +- Fix postinstall breaking build if it fails. + +## v0.15.0 [(2016-06-20)](https://github.com/nodegit/nodegit/releases/tag/v0.15.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.14.0...v0.15.0) + +- Update to libgit2 @ 37dba1a [PR #1041](https://github.com/nodegit/nodegit/pull/1041) + +This updates NodeGit to use the latest `HEAD` version of libgit2. The plan for staying on the official tagged releases of libgit2 is that they will get a maintenance branch and not-breaking API fixes will be backported to them. The first branch of this sort is `maint/0.14`. Going forward new releases of NodeGit will follow closely to the `master` branch of libgit2. 
+ +Summary of changes that were brought in: + +https://github.com/libgit2/libgit2/commit/37dba1a739b5ee6c45dc9f3c0bd1f7f7a18f13f7 +------- + +### Changes or improvements + +* `NodeGit.FetchOptions`, and `NodeGit.PushOptions` now have a `proxyOpts` field that accepts a `NodeGit.ProxyOptions` object that allows NodeGit to use a proxy for all remote communication + +* `NodeGit.MergeOptions` has a `defaultDriver` field that lets the caller change the driver used to when both sides of a merge have changed + +### API additions + +* `Commit.createWithSignature` allows the caller to create a signed commit. There are no tests for this currently so it's labelled experimental. + +* `Blob`, `Commit`, `Tag`, and `Tree` all have a new prototype `dup` method on them to make a low-level copy of the libgit2 object if needed. + +* `Odb#expandIds` is exposed which takes in a list of short ids and expands them in-place to the full id of the object in the database + +## v0.14.0 [(2016-06-20)](https://github.com/nodegit/nodegit/releases/tag/v0.14.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.13.2...v0.14.0) + +- Improve lifecycle scripts and install process [PR #1055](https://github.com/nodegit/nodegit/pull/1055) +- Fix example code [PR #1058](https://github.com/nodegit/nodegit/pull/1058) + +## v0.13.2 [(2016-06-09)](https://github.com/nodegit/nodegit/releases/tag/v0.13.2) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.13.1...v0.13.2) + +- Stop `RevWalk#walk` from swallowing errors in the callback [PR #1047](https://github.com/nodegit/nodegit/pull/1047) +- Stop swallowing errors in the install script [PR #1048](https://github.com/nodegit/nodegit/pull/1048) +- Fix initializing submodules when installing from npm [PR #1050](https://github.com/nodegit/nodegit/pull/1050) + +## v0.13.1 [(2016-06-03)](https://github.com/nodegit/nodegit/releases/tag/v0.13.1) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.13.0...v0.13.1) + +## Added + 
+- `Repository#discardLines` is now a thing [PR #1021](https://github.com/nodegit/nodegit/pull/1021) + +## Modified + +- Async methods now use a custom threadpool to prevent thread-locking the event loop [PR #1019](https://github.com/nodegit/nodegit/pull/1019) + +## Bug fixes + +- Fix building NodeGit from NPM [PR #1026](https://github.com/nodegit/nodegit/pull/1026) +- Plug a memory leak in `RevWalk.fastWalk` [PR #1030](https://github.com/nodegit/nodegit/pull/1030) +- Plug a memory leak with `Oid` [PR #1033](https://github.com/nodegit/nodegit/pull/1033) +- Fixed some underlying libgit2 objects getting freed incorrectly [PR #1036](https://github.com/nodegit/nodegit/pull/1036) + +## v0.13.0 [(2016-05-04)](https://github.com/nodegit/nodegit/releases/tag/v0.13.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.12.2...v0.13.0) + +## Summary + +This is a big update! Lots of work was done to bring NodeGit up to the latest stable libgit2 version (v0.24.1), to use babel in the library, to make it more stable, remove memory leaks, squash bugs and in general just improve the library for all. Make sure to see all of the API changes below (there are a lot). + +## Node support + +We have added Node 6 as a supported platform! Going forward we aim to have 1:1 support for versions of Node that are either current or LTS. That means that v0.12 will not be supported soon so if you're on that please upgrade to at least Node v4. Also Node v5 will *NOT* be LTS so when Node stops supporting that in the coming months we will as well. You can read more about the current Node upgrade plan [here](https://nodejs.org/en/blog/release/v6.0.0/). 
+ +## API Changes +------- + +### Modified + +- `Index#add`, `Index#addByPath`, `Index#clear`, `Index#conflictAdd`, `Index#conflictCleanup`, `Index#conflictGet`, `Index#conflictRemove`, `Index.open`, `Index#read`, `Index#readTree`, `Index#remove`, `Index#removeByPath`, `Index#removeDirectory`, `Index#read`, `Index#write`, `Index#writeTree`, and `Index#writeTreeTo` are all now asynchronous functions [PR #971](https://github.com/nodegit/nodegit/pull/971) +- Made `ancestoryEntry`, `outEntry` and `theirEntry` optional parameters on `Index#conflictAdd` [PR #997](https://github.com/nodegit/nodegit/pull/997) +- `Repository#refreshIndex` will return an Index object back that has the latest data loaded off of disk [PR #986](https://github.com/nodegit/nodegit/pull/986) +- `Commit.create` is now asynchronous [PR #1022](https://github.com/nodegit/nodegit/pull/1022) + +### Added + +- `Diff#merge` will combine a diff into itself [PR #1000](https://github.com/nodegit/nodegit/pull/1000) +- `ReflogEntry#committer`, `ReflogEntry#idNew`, `ReflogEntry#idOld`, and `ReflogEntry#message` have been added +[PR #1013](https://github.com/nodegit/nodegit/pull/1013) + +### Removed + +- `Repository#openIndex` [PR #990](https://github.com/nodegit/nodegit/pull/990) +- `Reflog#entryCommitter`, `Reflog#entryIdNew`, `Reflog#entryIdOld`, and `Reflog#entryMessage` have been moved to be under `ReflogEntry` +[PR #1013](https://github.com/nodegit/nodegit/pull/1013) + +### Bug fixes + +- `Branch.name` works now [PR #991](https://github.com/nodegit/nodegit/pull/991) +- Fixed a crash with callbacks from libgit2 [PR #944](https://github.com/nodegit/nodegit/pull/944) +- Fixed a crash in `Tree#entryByName` [PR #998](https://github.com/nodegit/nodegit/pull/998) +- More memory leaks have been plugged [PR #1005](https://github.com/nodegit/nodegit/pull/1005), [PR #1006](https://github.com/nodegit/nodegit/pull/1006), [PR #1014](https://github.com/nodegit/nodegit/pull/1014), and [PR 
#1015](https://github.com/nodegit/nodegit/pull/1015) +- `Commit#getDiffWithOptions` now actually passes the options correctly [PR #1008](https://github.com/nodegit/nodegit/pull/1008) + +## Upgraded to libgit2 v0.24.1 [PR #1010](https://github.com/nodegit/nodegit/pull/1010) +------- + +### Changes or improvements + +- Custom merge drivers can now be registered, which allows callers to + configure callbacks to honor `merge=driver` configuration in + `.gitattributes`. + +- Custom filters can now be registered with wildcard attributes, for + example `filter=*`. Consumers should examine the attributes parameter + of the `check` function for details. + +- Symlinks are now followed when locking a file, which can be + necessary when multiple worktrees share a base repository. + +- You can now set your own user-agent to be sent for HTTP requests by + using the `Libgit2.OPT.SET_USER_AGENT` with `Libgit2.opts()`. + +- You can set custom HTTP header fields to be sent along with requests + by passing them in the fetch and push options. + +- Tree objects are now assumed to be sorted. If a tree is not + correctly formed, it will give bad results. This is the git approach + and cuts a significant amount of time when reading the trees. + +- Filter registration is now protected against concurrent + registration. + +- Filenames which are not valid on Windows in an index no longer cause + to fail to parse it on that OS. + +- Rebases can now be performed purely in-memory, without touching the + repository's workdir. + +- When adding objects to the index, or when creating new tree or commit + objects, the inputs are validated to ensure that the dependent objects + exist and are of the correct type. This object validation can be + disabled with the `Libgit2.OPT.ENABLE_STRICT_OBJECT_CREATION` option. + +- The WinHTTP transport's handling of bad credentials now behaves like + the others, asking for credentials again. 
+ +### API additions + +- `Blob.createFromStream()` and + `Blob.createFromStreamCommit` allow you to create a blob by + writing into a stream. Useful when you do not know the final size or + want to copy the contents from another stream. + +- `Config#lock` has been added, which allow for + transactional/atomic complex updates to the configuration, removing + the opportunity for concurrent operations and not committing any + changes until the unlock. + +- `DiffOptions` added a new callback `progress_cb` to report on the + progress of the diff as files are being compared. The documentation of + the existing callback `notify_cb` was updated to reflect that it only + gets called when new deltas are added to the diff. + +- `FetchOptions` and `PushOptions` have gained a `customHeaders` + field to set the extra HTTP header fields to send. + +- `Commit#headerField` allows you to look up a specific header + field in a commit. + +### Breaking API changes + +- `MergeOptions` now provides a `defaultDriver` that can be used + to provide the name of a merge driver to be used to handle files changed + during a merge. + +- The `Merge.TREE_FLAG` is now `Merge.FLAG`. Subsequently, + `treeFlags` field of the `MergeOptions` structure is now named `flags`. + +- The `Merge.FILE_FLAGS` enum is now `Merge.FILE_FLAG` for + consistency with other enum type names. + +- `Cert` descendent types now have a proper `parent` member + +- It is the responsibility of the refdb backend to decide what to do + with the reflog on ref deletion. The file-based backend must delete + it, a database-backed one may wish to archive it. + +- `Index#add` and `Index#conflictAdd` will now use the case + as provided by the caller on case insensitive systems. Previous + versions would keep the case as it existed in the index. This does + not affect the higher-level `Index#addByPath` or + `Index#addFromBuffer` functions. 
+ +- The `Config.LEVEL` enum has gained a higher-priority value + `PROGRAMDATA` which represent a rough Windows equivalent + to the system level configuration. + +- `RebaseOptions` now has a `mergeOptions` field. + +- The index no longer performs locking itself. This is not something + users of the library should have been relying on as it's not part of + the concurrency guarantees. + +- `Remote#connect()` now takes a `customHeaders` argument to set + the extra HTTP header fields to send. + +- `Tree.entryFilemode`, `Tree.entryFilemodeRaw`, `Tree.entryId`, `Tree.entryName`, + `Tree.entryToObject`, and `Tree.entryType` have all been moved to the `TreeEntry` prototype. + Additionally, the `TreeEntry` fields have been removed in lieu of the corresponding functions to return + the data. + +## v0.12.2 [(2016-04-07)](https://github.com/nodegit/nodegit/releases/tag/v0.12.2) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.12.1...v0.12.2) + +## Added + +- We now provide 32-bit binaries for linux [PR #980](https://github.com/nodegit/nodegit/pull/980) + +## Bug fixes + +- Added memory clean up for references [PR #977](https://github.com/nodegit/nodegit/pull/977) and remotes [PR #981](https://github.com/nodegit/nodegit/pull/981) + +## v0.12.1 [(2016-03-30)](https://github.com/nodegit/nodegit/releases/tag/v0.12.1) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.12.0...v0.12.1) + +## Bug fixes + +- Fixed post install script dying on windows [PR #978](https://github.com/nodegit/nodegit/pull/978) + +## v0.12.0 [(2016-03-28)](https://github.com/nodegit/nodegit/releases/tag/v0.12.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.11.9...v0.12.0) + +## API changes +- `Ignore` + - Made `Ignore.pathIsIgnored` async [PR #970](https://github.com/nodegit/nodegit/pull/970) + +## Bug fixes + +- Added an error message when trying to install NodeGit without a required version of libstdc++ [PR 
#972](https://github.com/nodegit/nodegit/pull/972) +- Fix a crash when grabbing content out of a buffer that has unicode [PR #966](https://github.com/nodegit/nodegit/pull/966) +- Added some plumbing for better memory management [PR #958](https://github.com/nodegit/nodegit/pull/958) +- Fix `checkoutOptions` in `Stash#apply` [PR #956](https://github.com/nodegit/nodegit/pull/956) +- Fixed install when there is a space in the username on windows [PR #951](https://github.com/nodegit/nodegit/pull/951) +- Bump to nan@2.2.0 [PR #952](https://github.com/nodegit/nodegit/pull/952) + +## v0.11.9 [(2016-03-09)](https://github.com/nodegit/nodegit/releases/tag/v0.11.9) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.11.8...v0.11.9) + +- Fixed crash when calculating diff via `ConvenientPatch` [PR #945](https://github.com/nodegit/nodegit/pull/945) + +## v0.11.8 [(2016-03-07)](https://github.com/nodegit/nodegit/releases/tag/v0.11.8) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.11.7...v0.11.8) + +- Removed callback throttling due to segmentation faults. Will be implemented later. 
[PR #943](https://github.com/nodegit/nodegit/pull/943) + +## v0.11.7 [(2016-03-07)](https://github.com/nodegit/nodegit/releases/tag/v0.11.7) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.11.6...v0.11.7) + +- Added `Repository#mergeheadForeach` [PR #937](https://github.com/nodegit/nodegit/pull/937) +- Improved speed of all callbacks dramatically [PR #932](https://github.com/nodegit/nodegit/pull/932) +- Fixed `Merge.merge` docs to show it takes in an `AnnotatedCommit` and not a `Commit` [PR #935](https://github.com/nodegit/nodegit/pull/935) +- Fixed unicode in `Diff.blobToBuffer` getting corrupted [PR #935](https://github.com/nodegit/nodegit/pull/935) +- Fixed fetching/pulling to bitbucket in versions > v5.6 of node [PR #942](https://github.com/nodegit/nodegit/pull/942) + +## v0.11.6 [(2016-03-01)](https://github.com/nodegit/nodegit/releases/tag/v0.11.6) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.11.5...v0.11.6) + +- Added `Repository#checkoutRef` [PR #891](https://github.com/nodegit/nodegit/pull/891) +- `Repository#createCommitOnHead` no longer dies if the repo is empty [PR #927](https://github.com/nodegit/nodegit/pull/927) +- Fixed memory leak in `Patch#convenientFromDiff` [PR #930](https://github.com/nodegit/nodegit/pull/930) +- Generated files now have a header comment indicating that they are generated [PR #924](https://github.com/nodegit/nodegit/pull/924) +- Fixed http parsing errors in Node 5.6 [PR #931](https://github.com/nodegit/nodegit/pull/931) +- Fixed `Tree#walk` not returning the correct entries on `end` [PR #929](https://github.com/nodegit/nodegit/pull/929) + +## v0.11.5 [(2016-02-25)](https://github.com/nodegit/nodegit/releases/tag/v0.11.5) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.11.4...v0.11.5) + +- Fixed crash when calculating a diff [PR #922](https://github.com/nodegit/nodegit/pull/922) +- Fixed an issue with return values getting randomly corrupted [PR 
#923](https://github.com/nodegit/nodegit/pull/923)) + +## v0.11.4 [(2016-02-24)](https://github.com/nodegit/nodegit/releases/tag/v0.11.4) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.11.3...v0.11.4) + +- Fixed callback out values in callbacks from C++. This affects any NodeGit call that is passed a callback as an option [PR #921](https://github.com/nodegit/nodegit/pull/921) +- Fixed an issue with building the debug version of NodeGit on windows [PR #918](https://github.com/nodegit/nodegit/pull/918) + +## v0.11.3 [(2016-02-22)](https://github.com/nodegit/nodegit/releases/tag/v0.11.3) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.11.2...v0.11.3) + +- Fixed an issue where initializing NodeGit would sometimes seg fault. Also fixed an error when fetching concurrently [PR #912](https://github.com/nodegit/nodegit/pull/912) + +## v0.11.2 [(2016-02-18)](https://github.com/nodegit/nodegit/releases/tag/v0.11.2) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.11.1...v0.11.2) + +- Fixed an issue where when staging lines if the index is locked NodeGit just nuked it [PR #906](https://github.com/nodegit/nodegit/pull/906) +- Fixed diff calculation when staging lines/hunks [PR #906](https://github.com/nodegit/nodegit/pull/906) +- Fixed seg-fault in linux that happens when getting the diff of very small files [PR #908](https://github.com/nodegit/nodegit/pull/908) +- Fixed `RevWalk#fastWalk` dying when an error happens in libgit2 [PR #909](https://github.com/nodegit/nodegit/pull/909) + +## v0.11.1 [(2016-02-09)](https://github.com/nodegit/nodegit/releases/tag/v0.11.1) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.11.0...v0.11.1) + +- Numerous fixes and perf boosts to file history [PR #900](https://github.com/nodegit/nodegit/pull/900)[PR #896](https://github.com/nodegit/nodegit/pull/896) +- Several doc fixes [PR #899](https://github.com/nodegit/nodegit/pull/899)[PR 
#897](https://github.com/nodegit/nodegit/pull/897) + +## v0.11.0 [(2016-02-04)](https://github.com/nodegit/nodegit/releases/tag/v0.11.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.10.0...v0.11.0) + +- Change `Revert.commit` and `Revert.revert` to be async. [PR #887](https://github.com/nodegit/nodegit/pull/887) +- Added `RevWalk#fileHistoryWalk` for a faster way to retrieve history for a specific file. [PR #889](https://github.com/nodegit/nodegit/pull/889) + +## v0.10.0 [(2016-02-01)](https://github.com/nodegit/nodegit/releases/tag/v0.10.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.9.0...v0.10.0) + +- Clean mutexes are part of GC. No longer leaves processes running after the script ends [PR #880](https://github.com/nodegit/nodegit/pull/880) +- Increased the performance of `ConvenientPatch` by an order of magnitude [PR #883](https://github.com/nodegit/nodegit/pull/883) + +# API changes +- `ConvenientPatch` + - `ConvenientPatch` does not have a `patch` or a `delta` property associated with it, if you were using the `delta`, please just use prototype methods `oldFile`, `newFile`, and `Status`, which are stripped directly from the `delta`. + - `ConvenientPatch#hunks` returns a promise with an array of `ConvenientHunks`. +- `ConvenientHunk` + - `ConvenientHunk` does not have an exposed diffHunk associated with it, but does have the same members as diffHunk: + - `size()` : number of lines in the hunk + - `oldStart()` : old starting position + - `oldLines()` : number of lines in old file + - `newStart()` : new starting position + - `newLines()` : number of lines in new file + - `headerLen()` : length of header + - `header()` : returns the header of the hunk + - `lines()` : returns a promise containing `DiffLines`, not `ConvenientLines`. +- `DiffLine` +- `DiffLine` now contains the members `rawContent()` and `content()`. + - `rawContent()` contains the unformatted content of the line. 
This is no longer a string from the line to the end of the file. + - `content()` contains the utf8 formatted content of the line. + +## v0.9.0 [(2016-01-21)](https://github.com/nodegit/nodegit/releases/tag/v0.9.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.8.0...v0.9.0) + +- Thread safe fix to stop crashing on releasing mutexes [PR #876](https://github.com/nodegit/nodegit/pull/876) +- `Submodule#setIgnore`, `Submodule#setUpdate`, and `Submodule#setUrl` are now all async. `Submodule#status` and `Submodule#location` are now available [PR #867](https://github.com/nodegit/nodegit/pull/867) and [PR #870](https://github.com/nodegit/nodegit/pull/870) +- `Remote#defaultBranch` is now available [PR #872](https://github.com/nodegit/nodegit/pull/872) +- `Repository#mergeBranches` now takes in a `MergeOptions` parameter [PR #873](https://github.com/nodegit/nodegit/pull/873) +- Remove a NodeGit specific hack to make `Index#addAll` faster since that is fixed in libgit2 [PR #875](https://github.com/nodegit/nodegit/pull/875) + +## v0.8.0 [(2016-01-15)](https://github.com/nodegit/nodegit/releases/tag/v0.8.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.7.0...v0.8.0) + +- Thread safe locking has been added and currently is defaulted to off. Use `NodeGit.enableThreadSafety()` to turn on +- NodeGit no longer requires a specific Promise object from the `nodegit-promise` library to be passed in. You can now use whatever you want! 
+- `Repository#stageFilemode` now can accept an array of strings for files to update +- `Submodule#addToIndex`, `Submodule#addFinalize`, `Submodule#init`, `Submodule#open`, `Submodule#sync`, and `Submodule#update` are now all async methods + +## v0.7.0 [(2016-01-08)](https://github.com/nodegit/nodegit/releases/tag/v0.7.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.6.3...v0.7.0) + +- Bumped openssl to 1.0.2e to fix issues with prebuilts on linux platforms +- Fixed a bug with GIT_ITER_OVER breaking rebase and other iterative methods +- Make GraphDescendentOf asynchronous +- Fixed line length of utf8 strings + +## v0.6.3 [(2015-12-16)](https://github.com/nodegit/nodegit/releases/tag/v0.6.3) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.6.2...v0.6.3) + + - Fixed a bug where manually building for vanilla node would fail without explicitly + setting the target + +## v0.6.2 [(2015-12-16)](https://github.com/nodegit/nodegit/releases/tag/v0.6.2) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.6.1...v0.6.2) + + - Fixed a bug where manually building on windows would fail (if unable to download a prebuilt binary) + +## v0.6.1 [(2015-12-14)](https://github.com/nodegit/nodegit/releases/tag/v0.6.1) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.6.0...v0.6.1) + + - Fixed Treebuilder.create to have an optional source + - Added Repository.getSubmoduleNames + - Added Submodule.Foreach + +## v0.6.0 [(2015-12-08)](https://github.com/nodegit/nodegit/releases/tag/v0.6.0) + + - Added file mode staging + - Added a fast rev walk to do the rev walk in C++ and bubble the result up to JS + - Updated to latest libgit2 + - Updated to latest openssl + - Updated to latest nodegit-promise + - Removed c++11 dependency + - Fixed weirdness in lifecycle scripts + - Added downloading prebuilt binaries for electron + +## v0.4.1 [(2015-06-02)](https://github.com/nodegit/nodegit/tree/v0.4.1) + +[Full 
Changelog](https://github.com/nodegit/nodegit/compare/v0.4.0...0.4.1) + +**Closed issues:** + +- Error: Module did not self-register [\#593](https://github.com/nodegit/nodegit/issues/593) + +- A guide on how to create a new branch, switch to it and delete it. [\#588](https://github.com/nodegit/nodegit/issues/588) + +- A way to get "gone" branches [\#583](https://github.com/nodegit/nodegit/issues/583) + +- Missing documentation pages for BranchIterator and NodeIterator [\#581](https://github.com/nodegit/nodegit/issues/581) + +- ELIFECYCLE error on `npm rebuild` [\#578](https://github.com/nodegit/nodegit/issues/578) + +- npm rebuild fails \(lifecycleScripts/clean should not delete lifecycleScripts!\) [\#576](https://github.com/nodegit/nodegit/issues/576) + +- Unable to compile and install v0.4.0 on Windows [\#575](https://github.com/nodegit/nodegit/issues/575) + +- Doesn't work with Electron [\#574](https://github.com/nodegit/nodegit/issues/574) + +- Doesn't work with io.js 2.x [\#573](https://github.com/nodegit/nodegit/issues/573) + +- Getting an exception during a fetchAll in defaultSignature in repository.js [\#572](https://github.com/nodegit/nodegit/issues/572) + +- tree\_entry path function not working when calling getEntry with a path including subdir [\#570](https://github.com/nodegit/nodegit/issues/570) + +- Build is broken on windows [\#565](https://github.com/nodegit/nodegit/issues/565) + +- Cloning git sub modules using "nodegit" npm module [\#560](https://github.com/nodegit/nodegit/issues/560) + +- How to get remote latest commit? [\#559](https://github.com/nodegit/nodegit/issues/559) + +- npm install fails for nw.js [\#558](https://github.com/nodegit/nodegit/issues/558) + +- nodegit and nw.js [\#557](https://github.com/nodegit/nodegit/issues/557) + +**Merged pull requests:** + +- Cherrypick tests [\#595](https://github.com/nodegit/nodegit/pull/595) ([jdgarcia](https://github.com/jdgarcia)) + +- Fix for issue \#591. 
TreeEntry.path\(\) throws when TreeEntry came from Tree.entries\(\) [\#592](https://github.com/nodegit/nodegit/pull/592) ([tomruggs](https://github.com/tomruggs)) + +- Standard merge [\#589](https://github.com/nodegit/nodegit/pull/589) ([jdgarcia](https://github.com/jdgarcia)) + +- Add `git\_index\_conflict\_get` and test [\#586](https://github.com/nodegit/nodegit/pull/586) ([johnhaley81](https://github.com/johnhaley81)) + +- Bump nan [\#584](https://github.com/nodegit/nodegit/pull/584) ([johnhaley81](https://github.com/johnhaley81)) + +- Fix CI false positives [\#582](https://github.com/nodegit/nodegit/pull/582) ([johnhaley81](https://github.com/johnhaley81)) + +- Added NodeGit.Checkout.index [\#579](https://github.com/nodegit/nodegit/pull/579) ([jdgarcia](https://github.com/jdgarcia)) + +- Check for existence to avoid throwing an error when there is no default signature [\#577](https://github.com/nodegit/nodegit/pull/577) ([tomruggs](https://github.com/tomruggs)) + +- Fix path function in tree\_entry \(fix for issue \#570\) [\#571](https://github.com/nodegit/nodegit/pull/571) ([jfremy](https://github.com/jfremy)) + +- Fix Rebase issues [\#568](https://github.com/nodegit/nodegit/pull/568) ([jdgarcia](https://github.com/jdgarcia)) + +- Fix build issues with 0.4.0 [\#566](https://github.com/nodegit/nodegit/pull/566) ([maxkorp](https://github.com/maxkorp)) + +- stop cleaning on post-install [\#562](https://github.com/nodegit/nodegit/pull/562) ([maxkorp](https://github.com/maxkorp)) + +## v0.4.0 [(2015-05-07)](https://github.com/nodegit/nodegit/tree/v0.4.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.3.3...v0.4.0) + +**Closed issues:** + +- Error installing nodegit as dependency of an atom-shell app [\#556](https://github.com/nodegit/nodegit/issues/556) + +- New version of nan is breaking compile [\#554](https://github.com/nodegit/nodegit/issues/554) + +- Install error from openssl [\#551](https://github.com/nodegit/nodegit/issues/551) + +- How 
to get Tag instance by tag\_name? [\#543](https://github.com/nodegit/nodegit/issues/543) + +- ELIFECYCLE Error on install [\#540](https://github.com/nodegit/nodegit/issues/540) + +- Remote.delete returns -3 [\#539](https://github.com/nodegit/nodegit/issues/539) + +- Repository.init should accept boolean value for is\_bare [\#538](https://github.com/nodegit/nodegit/issues/538) + +- getStatus hangs [\#537](https://github.com/nodegit/nodegit/issues/537) + +- Unable to compile or install with npm install nodegit [\#536](https://github.com/nodegit/nodegit/issues/536) + +- `options` not reusable, nodegit destroys it [\#533](https://github.com/nodegit/nodegit/issues/533) + +- 'Error: 'directory' exists and is not an empty directory' \(but it doesn't exist\) [\#530](https://github.com/nodegit/nodegit/issues/530) + +- hey !:-\) problem with Branch.iteratorNew \(support\) [\#528](https://github.com/nodegit/nodegit/issues/528) + +- hey !:-\) problem with Branch.iteratorNew [\#527](https://github.com/nodegit/nodegit/issues/527) + +- hey !:-\) problem with Branch.iteratorNew [\#526](https://github.com/nodegit/nodegit/issues/526) + +- hey !:-\) problem with Branch.iteratorNew [\#525](https://github.com/nodegit/nodegit/issues/525) + +- Error: Reference 'refs/remotes/user/foo/HEAD' not found [\#523](https://github.com/nodegit/nodegit/issues/523) + +- Path issues windows [\#522](https://github.com/nodegit/nodegit/issues/522) + +- Issues on scientific linux 6.6 [\#521](https://github.com/nodegit/nodegit/issues/521) + +- It's looking for node-typ under `/Users/johnh/.node-gyp` [\#518](https://github.com/nodegit/nodegit/issues/518) + +- Not working with iojs [\#516](https://github.com/nodegit/nodegit/issues/516) + +- Cred.sshKeyNew not working: Too many redirects or authentication replays [\#511](https://github.com/nodegit/nodegit/issues/511) + +- Open a Repo from a subfolder [\#509](https://github.com/nodegit/nodegit/issues/509) + +- Create git-like CLI 
[\#508](https://github.com/nodegit/nodegit/issues/508) + +- Cannot create an instance of Packbuilder [\#507](https://github.com/nodegit/nodegit/issues/507) + +- Cannot find module '../build/Debug/nodegit' [\#506](https://github.com/nodegit/nodegit/issues/506) + +- Bug with oid implicit cast inside C++ [\#501](https://github.com/nodegit/nodegit/issues/501) + +- Failed to `require` on Ubuntu 12.04LTS [\#493](https://github.com/nodegit/nodegit/issues/493) + +- Enable `git\_config` [\#449](https://github.com/nodegit/nodegit/issues/449) + +- Pull example doesn't fully update the index [\#389](https://github.com/nodegit/nodegit/issues/389) + +**Merged pull requests:** + +- There is an incompatibility with NaN 1.8.x, keeping 1.7.x for now. [\#552](https://github.com/nodegit/nodegit/pull/552) ([wiggzz](https://github.com/wiggzz)) + +- A wrapper for git\_diff\_blob\_to\_buffer [\#550](https://github.com/nodegit/nodegit/pull/550) ([bleathem](https://github.com/bleathem)) + +- Update to 0.4.0 [\#548](https://github.com/nodegit/nodegit/pull/548) ([tbranyen](https://github.com/tbranyen)) + +- Removed the superflous "line" argument [\#547](https://github.com/nodegit/nodegit/pull/547) ([bleathem](https://github.com/bleathem)) + +- This fixes polling sync promises in callbacks. [\#546](https://github.com/nodegit/nodegit/pull/546) ([johnhaley81](https://github.com/johnhaley81)) + +- Add get/set config string methods and tests [\#545](https://github.com/nodegit/nodegit/pull/545) ([johnhaley81](https://github.com/johnhaley81)) + +- Make `Remote.delete` async and return error messages correctly [\#544](https://github.com/nodegit/nodegit/pull/544) ([johnhaley81](https://github.com/johnhaley81)) + +- Bump "nodegit-promise" version [\#542](https://github.com/nodegit/nodegit/pull/542) ([johnhaley81](https://github.com/johnhaley81)) + +- Introduced a new ConvenientLine class to wrap the lines returned from ConvenientHunk. 
[\#541](https://github.com/nodegit/nodegit/pull/541) ([bleathem](https://github.com/bleathem)) + +- Fix some things missed by the generating scripts [\#535](https://github.com/nodegit/nodegit/pull/535) ([johnhaley81](https://github.com/johnhaley81)) + +- Attempt remove the delete keyword [\#534](https://github.com/nodegit/nodegit/pull/534) ([tbranyen](https://github.com/tbranyen)) + +- Fix freeing a `GitOid` that was passed as a string [\#531](https://github.com/nodegit/nodegit/pull/531) ([johnhaley81](https://github.com/johnhaley81)) + +- fix typo: "byes" [\#529](https://github.com/nodegit/nodegit/pull/529) ([rutsky](https://github.com/rutsky)) + +- Add convenience methods to status file [\#524](https://github.com/nodegit/nodegit/pull/524) ([maxkorp](https://github.com/maxkorp)) + +- Lots of complaints of missing build/Debug/nodegit [\#520](https://github.com/nodegit/nodegit/pull/520) ([tbranyen](https://github.com/tbranyen)) + +- Add `Graph.aheadBehind` and tests [\#517](https://github.com/nodegit/nodegit/pull/517) ([johnhaley81](https://github.com/johnhaley81)) + +- Update to use libgit2 v0.22.2 [\#515](https://github.com/nodegit/nodegit/pull/515) ([johnhaley81](https://github.com/johnhaley81)) + +- Add `Repository.prototype.fetchheadForeach` and tests [\#514](https://github.com/nodegit/nodegit/pull/514) ([johnhaley81](https://github.com/johnhaley81)) + +- Converted create methods to be synchronous [\#513](https://github.com/nodegit/nodegit/pull/513) ([tbranyen](https://github.com/tbranyen)) + +- Fix atom-shell build on windows [\#512](https://github.com/nodegit/nodegit/pull/512) ([johnhaley81](https://github.com/johnhaley81)) + +- Update Checkout and Merge [\#505](https://github.com/nodegit/nodegit/pull/505) ([orderedlist](https://github.com/orderedlist)) + +- Add note tests [\#504](https://github.com/nodegit/nodegit/pull/504) ([tbranyen](https://github.com/tbranyen)) + +- Revert "Guide navigation is currently confusing" 
[\#503](https://github.com/nodegit/nodegit/pull/503) ([thgaskell](https://github.com/thgaskell)) + +- Improve coverage [\#502](https://github.com/nodegit/nodegit/pull/502) ([tbranyen](https://github.com/tbranyen)) + +- Adds in CPP code coverage and joined JS [\#499](https://github.com/nodegit/nodegit/pull/499) ([tbranyen](https://github.com/tbranyen)) + +- Add twitter username to README.md [\#498](https://github.com/nodegit/nodegit/pull/498) ([johnhaley81](https://github.com/johnhaley81)) + +- Fix symbolic reference handling in getReferences [\#496](https://github.com/nodegit/nodegit/pull/496) ([billt2006](https://github.com/billt2006)) + +- Enable `git\_stash\_foreach` [\#495](https://github.com/nodegit/nodegit/pull/495) ([johnhaley81](https://github.com/johnhaley81)) + +- Guide navigation is currently confusing [\#494](https://github.com/nodegit/nodegit/pull/494) ([tbranyen](https://github.com/tbranyen)) + +- Fix gitter badge for npm [\#492](https://github.com/nodegit/nodegit/pull/492) ([billt2006](https://github.com/billt2006)) + +- Add automatically generated change log file. 
[\#465](https://github.com/nodegit/nodegit/pull/465) ([skywinder](https://github.com/skywinder)) + +## v0.3.3 [(2015-03-16)](https://github.com/nodegit/nodegit/tree/v0.3.3) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.3.2...v0.3.3) + +**Merged pull requests:** + +- Download all dev dependencies before build [\#491](https://github.com/nodegit/nodegit/pull/491) ([johnhaley81](https://github.com/johnhaley81)) + +## v0.3.2 [(2015-03-16)](https://github.com/nodegit/nodegit/tree/v0.3.2) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.3.1...v0.3.2) + +**Closed issues:** + +- Amazon S3 CDN link is broken [\#482](https://github.com/nodegit/nodegit/issues/482) + +**Merged pull requests:** + +- Confirm builder exists before building [\#490](https://github.com/nodegit/nodegit/pull/490) ([johnhaley81](https://github.com/johnhaley81)) + +## v0.3.1 [(2015-03-14)](https://github.com/nodegit/nodegit/tree/v0.3.1) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.3.0...v0.3.1) + +**Merged pull requests:** + +- Revert node-pre-gyp to install not build [\#486](https://github.com/nodegit/nodegit/pull/486) ([tbranyen](https://github.com/tbranyen)) + +## v0.3.0 [(2015-03-13)](https://github.com/nodegit/nodegit/tree/v0.3.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.2.7...v0.3.0) + +**Closed issues:** + +- Push [\#463](https://github.com/nodegit/nodegit/issues/463) + +- Suppress astyle errors [\#459](https://github.com/nodegit/nodegit/issues/459) + +- io.js support [\#447](https://github.com/nodegit/nodegit/issues/447) + +- Meteor: icon fonts not working \(Resource interpreted as Font but transferred with MIME type text/html\) [\#443](https://github.com/nodegit/nodegit/issues/443) + +- AnnotatedCommit.x listing as Annotated.commitX [\#437](https://github.com/nodegit/nodegit/issues/437) + +- fetchAll\(\) fails unless a default signature is available [\#431](https://github.com/nodegit/nodegit/issues/431) + +- 
Question: Is there a certificateCheck option available for pushing to a remote repository? [\#420](https://github.com/nodegit/nodegit/issues/420) + +- Repository.open returns empty object [\#412](https://github.com/nodegit/nodegit/issues/412) + +- Missing documentation for Tree.walk\(\) [\#411](https://github.com/nodegit/nodegit/issues/411) + +- comparing from 0.1.4 to 0.2.0 [\#410](https://github.com/nodegit/nodegit/issues/410) + +- Potential example issue in add-and-commit.js L45-48 [\#409](https://github.com/nodegit/nodegit/issues/409) + +- failed to install on ubuntu 14.04 [\#408](https://github.com/nodegit/nodegit/issues/408) + +- Return promises instead of nesting them [\#407](https://github.com/nodegit/nodegit/issues/407) + +- segfault when cloning from private BitBucket repo [\#406](https://github.com/nodegit/nodegit/issues/406) + +- Subtrees + custom error handling [\#400](https://github.com/nodegit/nodegit/issues/400) + +- How to use nodegit in atom shell ..... [\#393](https://github.com/nodegit/nodegit/issues/393) + +- Cannot create a new branch [\#391](https://github.com/nodegit/nodegit/issues/391) + +- Remove fixappveyor from clone tests [\#385](https://github.com/nodegit/nodegit/issues/385) + +- Commit isn't working [\#381](https://github.com/nodegit/nodegit/issues/381) + +- Rename combyne folder to templates [\#378](https://github.com/nodegit/nodegit/issues/378) + +- Cloning SSH repos seem to fail [\#372](https://github.com/nodegit/nodegit/issues/372) + +- Commit.getDiff is backwards? 
[\#368](https://github.com/nodegit/nodegit/issues/368) + +- List all files in repo \(git ls-tree\) [\#365](https://github.com/nodegit/nodegit/issues/365) + +- Checking out a branch [\#361](https://github.com/nodegit/nodegit/issues/361) + +- nodegit no longer builds in nwjs [\#360](https://github.com/nodegit/nodegit/issues/360) + +- Module install/build fails on Heroku [\#332](https://github.com/nodegit/nodegit/issues/332) + +- 2 Step Authentication [\#323](https://github.com/nodegit/nodegit/issues/323) + +**Merged pull requests:** + +- Rename `Annotated` to `AnnotatedCommit` [\#485](https://github.com/nodegit/nodegit/pull/485) ([johnhaley81](https://github.com/johnhaley81)) + +- Bump version to 0.3.0 [\#484](https://github.com/nodegit/nodegit/pull/484) ([johnhaley81](https://github.com/johnhaley81)) + +- Remove unneeded connect call from push example [\#483](https://github.com/nodegit/nodegit/pull/483) ([johnhaley81](https://github.com/johnhaley81)) + +- Update push example [\#481](https://github.com/nodegit/nodegit/pull/481) ([billt2006](https://github.com/billt2006)) + +- Fix trailing space in atom-shell windows install [\#480](https://github.com/nodegit/nodegit/pull/480) ([johnhaley81](https://github.com/johnhaley81)) + +- Fix atom-shell install on windows [\#479](https://github.com/nodegit/nodegit/pull/479) ([johnhaley81](https://github.com/johnhaley81)) + +- Updated API documentation link to work with NPM's markdown renderer. 
[\#477](https://github.com/nodegit/nodegit/pull/477) ([hughfdjackson](https://github.com/hughfdjackson)) + +- Add option to `fetch` to prune the remote afterwards [\#476](https://github.com/nodegit/nodegit/pull/476) ([johnhaley81](https://github.com/johnhaley81)) + +- Make index.addAll use status to increase performance [\#475](https://github.com/nodegit/nodegit/pull/475) ([maxkorp](https://github.com/maxkorp)) + +- Add defaults to `Remote.prototype.push` [\#474](https://github.com/nodegit/nodegit/pull/474) ([johnhaley81](https://github.com/johnhaley81)) + +- Fix `createCommitOnHead` [\#473](https://github.com/nodegit/nodegit/pull/473) ([johnhaley81](https://github.com/johnhaley81)) + +- Move guides around to remove subindexes [\#472](https://github.com/nodegit/nodegit/pull/472) ([orderedlist](https://github.com/orderedlist)) + +- Put `Remote.Push` on the remote prototype [\#470](https://github.com/nodegit/nodegit/pull/470) ([johnhaley81](https://github.com/johnhaley81)) + +- Change Repository.prototype.setHead to be asynchronous [\#469](https://github.com/nodegit/nodegit/pull/469) ([jrbalsano](https://github.com/jrbalsano)) + +- Test in Node 0.12 and io.js [\#468](https://github.com/nodegit/nodegit/pull/468) ([tbranyen](https://github.com/tbranyen)) + +- Add checkoutBranch convenience method [\#466](https://github.com/nodegit/nodegit/pull/466) ([johnhaley81](https://github.com/johnhaley81)) + +- Don't assign enums to \_\_proto\_\_ [\#464](https://github.com/nodegit/nodegit/pull/464) ([orderedlist](https://github.com/orderedlist)) + +- Fix push example [\#462](https://github.com/nodegit/nodegit/pull/462) ([johnhaley81](https://github.com/johnhaley81)) + +- Adds support for strarray in structs [\#461](https://github.com/nodegit/nodegit/pull/461) ([orderedlist](https://github.com/orderedlist)) + +- supress astyle warnings [\#460](https://github.com/nodegit/nodegit/pull/460) ([maxkorp](https://github.com/maxkorp)) + +- Template proto functions 
[\#458](https://github.com/nodegit/nodegit/pull/458) ([maxkorp](https://github.com/maxkorp)) + +- Remote push [\#457](https://github.com/nodegit/nodegit/pull/457) ([mattyclarkson](https://github.com/mattyclarkson)) + +- Include missing lib files in nodegit.js template [\#455](https://github.com/nodegit/nodegit/pull/455) ([orderedlist](https://github.com/orderedlist)) + +- StrArray memory fix [\#454](https://github.com/nodegit/nodegit/pull/454) ([mattyclarkson](https://github.com/mattyclarkson)) + +- Better cloning with NodeGit [\#453](https://github.com/nodegit/nodegit/pull/453) ([tbranyen](https://github.com/tbranyen)) + +- Add Diff.prototype.findSimilar [\#452](https://github.com/nodegit/nodegit/pull/452) ([orderedlist](https://github.com/orderedlist)) + +- Str array converter fix [\#451](https://github.com/nodegit/nodegit/pull/451) ([mattyclarkson](https://github.com/mattyclarkson)) + +- Default signature always returns valid signature [\#450](https://github.com/nodegit/nodegit/pull/450) ([johnhaley81](https://github.com/johnhaley81)) + +- Status.byIndex and StatusEntry [\#448](https://github.com/nodegit/nodegit/pull/448) ([orderedlist](https://github.com/orderedlist)) + +- Upgrade to nan 1.7.0 [\#446](https://github.com/nodegit/nodegit/pull/446) ([orderedlist](https://github.com/orderedlist)) + +- Added in an HTTP url for test [\#445](https://github.com/nodegit/nodegit/pull/445) ([tbranyen](https://github.com/tbranyen)) + +- Add examples [\#442](https://github.com/nodegit/nodegit/pull/442) ([johnhaley81](https://github.com/johnhaley81)) + +- hide callback payloads from javascript [\#441](https://github.com/nodegit/nodegit/pull/441) ([maxkorp](https://github.com/maxkorp)) + +- Fix transfer callback stats [\#440](https://github.com/nodegit/nodegit/pull/440) ([johnhaley81](https://github.com/johnhaley81)) + +- Automatically free repositories post clone [\#434](https://github.com/nodegit/nodegit/pull/434) ([tbranyen](https://github.com/tbranyen)) + +- Skip transfer 
progress test until it's fixed [\#433](https://github.com/nodegit/nodegit/pull/433) ([johnhaley81](https://github.com/johnhaley81)) + +- Change environment to default for upgraded service [\#428](https://github.com/nodegit/nodegit/pull/428) ([maxkorp](https://github.com/maxkorp)) + +- Make the `git\_remote\_push` function async [\#427](https://github.com/nodegit/nodegit/pull/427) ([johnhaley81](https://github.com/johnhaley81)) + +- Attempt to fix Windows file locking bug [\#425](https://github.com/nodegit/nodegit/pull/425) ([tbranyen](https://github.com/tbranyen)) + +- Fix seg faults [\#424](https://github.com/nodegit/nodegit/pull/424) ([johnhaley81](https://github.com/johnhaley81)) + +- Clean up the persisting of props [\#423](https://github.com/nodegit/nodegit/pull/423) ([johnhaley81](https://github.com/johnhaley81)) + +- Fix indexEntry construction and blobFromBuffer [\#422](https://github.com/nodegit/nodegit/pull/422) ([orderedlist](https://github.com/orderedlist)) + +- Allow for saving of props to an object [\#421](https://github.com/nodegit/nodegit/pull/421) ([johnhaley81](https://github.com/johnhaley81)) + +- Fixes segfault issue recorded in \#406 [\#419](https://github.com/nodegit/nodegit/pull/419) ([tbranyen](https://github.com/tbranyen)) + +- Update jsdoc and ignore some methods [\#418](https://github.com/nodegit/nodegit/pull/418) ([orderedlist](https://github.com/orderedlist)) + +- Converting Examples to Guides [\#417](https://github.com/nodegit/nodegit/pull/417) ([tbranyen](https://github.com/tbranyen)) + +- Fix callbacks with just return value and single payload [\#416](https://github.com/nodegit/nodegit/pull/416) ([johnhaley81](https://github.com/johnhaley81)) + +- Add `git\_reset` and `git\_reset\_default` [\#415](https://github.com/nodegit/nodegit/pull/415) ([johnhaley81](https://github.com/johnhaley81)) + +- Enable `git\_index\_remove\_all` and `git\_index\_update\_all` [\#414](https://github.com/nodegit/nodegit/pull/414) 
([johnhaley81](https://github.com/johnhaley81)) + +- Added code for `git\_strarray` and enabled `git\_index\_add\_all` [\#413](https://github.com/nodegit/nodegit/pull/413) ([johnhaley81](https://github.com/johnhaley81)) + +- Tree Entry getBlob\(\) should also support the callback pattern. [\#405](https://github.com/nodegit/nodegit/pull/405) ([jeffwilcox](https://github.com/jeffwilcox)) + +- Adds in git\_checkout\_tree [\#402](https://github.com/nodegit/nodegit/pull/402) ([tbranyen](https://github.com/tbranyen)) + +- Made changes to the README [\#399](https://github.com/nodegit/nodegit/pull/399) ([tbranyen](https://github.com/tbranyen)) + +- Expose gc so all tests run in CI [\#398](https://github.com/nodegit/nodegit/pull/398) ([tbranyen](https://github.com/tbranyen)) + +- One more dependency update [\#397](https://github.com/nodegit/nodegit/pull/397) ([maxkorp](https://github.com/maxkorp)) + +- Update moar dependencies. [\#396](https://github.com/nodegit/nodegit/pull/396) ([maxkorp](https://github.com/maxkorp)) + +- Updated most dependencies to latest versions [\#394](https://github.com/nodegit/nodegit/pull/394) ([johnhaley81](https://github.com/johnhaley81)) + +- Index diffing [\#392](https://github.com/nodegit/nodegit/pull/392) ([orderedlist](https://github.com/orderedlist)) + +- Update to libgit2 v0.22.1 [\#390](https://github.com/nodegit/nodegit/pull/390) ([johnhaley81](https://github.com/johnhaley81)) + +- Fix test issues [\#388](https://github.com/nodegit/nodegit/pull/388) ([maxkorp](https://github.com/maxkorp)) + +- Fix building when a space is in the path [\#387](https://github.com/nodegit/nodegit/pull/387) ([billt2006](https://github.com/billt2006)) + +- General maintenance [\#386](https://github.com/nodegit/nodegit/pull/386) ([maxkorp](https://github.com/maxkorp)) + +- Add 2 convenience methods to revwalk [\#384](https://github.com/nodegit/nodegit/pull/384) ([maxkorp](https://github.com/maxkorp)) + +- Make all cred generators sync. 
[\#377](https://github.com/nodegit/nodegit/pull/377) ([maxkorp](https://github.com/maxkorp)) + +- Status and StatusList [\#374](https://github.com/nodegit/nodegit/pull/374) ([orderedlist](https://github.com/orderedlist)) + +- Fix the package scripts [\#373](https://github.com/nodegit/nodegit/pull/373) ([maxkorp](https://github.com/maxkorp)) + +- Removes Node 0.11 testing completely [\#371](https://github.com/nodegit/nodegit/pull/371) ([tbranyen](https://github.com/tbranyen)) + +- Allow null trees on Diff.treeToTree [\#370](https://github.com/nodegit/nodegit/pull/370) ([orderedlist](https://github.com/orderedlist)) + +- Atom shell support [\#369](https://github.com/nodegit/nodegit/pull/369) ([maxkorp](https://github.com/maxkorp)) + +- `Checkout.head` initializes options if none are passed [\#367](https://github.com/nodegit/nodegit/pull/367) ([johnhaley81](https://github.com/johnhaley81)) + +- INCLUDE\_UNTRACKED option not working for diffs [\#366](https://github.com/nodegit/nodegit/pull/366) ([kmctown](https://github.com/kmctown)) + +- Updated fs-extra to 0.15.0 [\#363](https://github.com/nodegit/nodegit/pull/363) ([johnhaley81](https://github.com/johnhaley81)) + +- Make remote\#download async [\#326](https://github.com/nodegit/nodegit/pull/326) ([tbranyen](https://github.com/tbranyen)) + +- Enable transfer progress [\#325](https://github.com/nodegit/nodegit/pull/325) ([tbranyen](https://github.com/tbranyen)) + +## v0.2.7 [(2015-01-21)](https://github.com/nodegit/nodegit/tree/v0.2.7) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.2.6...v0.2.7) + +## v0.2.6 [(2015-01-20)](https://github.com/nodegit/nodegit/tree/v0.2.6) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.2.5...v0.2.6) + +**Merged pull requests:** + +- \[WIP\] Added in some diff functions from libgit2 [\#348](https://github.com/nodegit/nodegit/pull/348) ([johnhaley81](https://github.com/johnhaley81)) + +## v0.2.5 
[(2015-01-20)](https://github.com/nodegit/nodegit/tree/v0.2.5) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.2.4...v0.2.5) + +**Closed issues:** + +- Lookup a non existent commit crashes the process. [\#353](https://github.com/nodegit/nodegit/issues/353) + +- Why node-git uses 90% rotated hexagon? [\#344](https://github.com/nodegit/nodegit/issues/344) + +- Needed pull example or help with code [\#341](https://github.com/nodegit/nodegit/issues/341) + +- Can't require nodegit without building it explicitly [\#340](https://github.com/nodegit/nodegit/issues/340) + +- Tracking down bugs [\#331](https://github.com/nodegit/nodegit/issues/331) + +- Document possible values of CloneOptions [\#330](https://github.com/nodegit/nodegit/issues/330) + +- Require generating error [\#329](https://github.com/nodegit/nodegit/issues/329) + +- Failed getting Banner [\#328](https://github.com/nodegit/nodegit/issues/328) + +- Documentation broken [\#327](https://github.com/nodegit/nodegit/issues/327) + +- Fetch doesn't seem to work with https urls. [\#322](https://github.com/nodegit/nodegit/issues/322) + +**Merged pull requests:** + +- Refactor installation and publication [\#359](https://github.com/nodegit/nodegit/pull/359) ([maxkorp](https://github.com/maxkorp)) + +- Lint examples [\#358](https://github.com/nodegit/nodegit/pull/358) ([maxkorp](https://github.com/maxkorp)) + +- Commit.getParents working with merge commits [\#357](https://github.com/nodegit/nodegit/pull/357) ([bjornarg](https://github.com/bjornarg)) + +- Fixed a typo in the debug build instruction. 
[\#356](https://github.com/nodegit/nodegit/pull/356) ([mcollina](https://github.com/mcollina)) + +- \[WIP\] Attempt at fixing appveyor [\#352](https://github.com/nodegit/nodegit/pull/352) ([johnhaley81](https://github.com/johnhaley81)) + +- Updated to nan 1.5.0 and fixed build errors [\#351](https://github.com/nodegit/nodegit/pull/351) ([johnhaley81](https://github.com/johnhaley81)) + +- Added debug build instructions. [\#349](https://github.com/nodegit/nodegit/pull/349) ([mcollina](https://github.com/mcollina)) + +- Added checkout head method and tests [\#347](https://github.com/nodegit/nodegit/pull/347) ([johnhaley81](https://github.com/johnhaley81)) + +- bump devDependencies [\#346](https://github.com/nodegit/nodegit/pull/346) ([PeterDaveHello](https://github.com/PeterDaveHello)) + +- Update dependency node-pre-gyp to ~0.6 [\#345](https://github.com/nodegit/nodegit/pull/345) ([PeterDaveHello](https://github.com/PeterDaveHello)) + +- Update dependency fs-extra to ~0.14.0 [\#343](https://github.com/nodegit/nodegit/pull/343) ([PeterDaveHello](https://github.com/PeterDaveHello)) + +- Add Dependency badge in readme [\#342](https://github.com/nodegit/nodegit/pull/342) ([PeterDaveHello](https://github.com/PeterDaveHello)) + +- Fixed promise chain on install [\#339](https://github.com/nodegit/nodegit/pull/339) ([johnhaley81](https://github.com/johnhaley81)) + +- Do not double free during callbacks. 
[\#338](https://github.com/nodegit/nodegit/pull/338) ([mcollina](https://github.com/mcollina)) + +- Use svg instead of png to get better image quality [\#337](https://github.com/nodegit/nodegit/pull/337) ([PeterDaveHello](https://github.com/PeterDaveHello)) + +- Update to libgit 0.21.4 [\#336](https://github.com/nodegit/nodegit/pull/336) ([johnhaley81](https://github.com/johnhaley81)) + +- Fix issue 333 [\#334](https://github.com/nodegit/nodegit/pull/334) ([johnhaley81](https://github.com/johnhaley81)) + +- Update appveyor.yml to remove project id [\#324](https://github.com/nodegit/nodegit/pull/324) ([vladikoff](https://github.com/vladikoff)) + +- moving some deps to devdeps [\#320](https://github.com/nodegit/nodegit/pull/320) ([maxkorp](https://github.com/maxkorp)) + +## v0.2.4 [(2014-12-05)](https://github.com/nodegit/nodegit/tree/v0.2.4) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.2.3...v0.2.4) + +**Closed issues:** + +- Fetch does not really fetch [\#314](https://github.com/nodegit/nodegit/issues/314) + +- Generate Missing Tests - Unable [\#313](https://github.com/nodegit/nodegit/issues/313) + +- Unable to get reference for HEAD [\#311](https://github.com/nodegit/nodegit/issues/311) + +- nodegit.Signature.now broken [\#306](https://github.com/nodegit/nodegit/issues/306) + +- current branch [\#305](https://github.com/nodegit/nodegit/issues/305) + +- Module fails to load [\#299](https://github.com/nodegit/nodegit/issues/299) + +- How to list all tags? 
[\#298](https://github.com/nodegit/nodegit/issues/298) + +- Building for ARM [\#292](https://github.com/nodegit/nodegit/issues/292) + +- Next release checklist [\#256](https://github.com/nodegit/nodegit/issues/256) + +**Merged pull requests:** + +- Fixed fetch to be async and use callbacks [\#319](https://github.com/nodegit/nodegit/pull/319) ([johnhaley81](https://github.com/johnhaley81)) + +- Make contributing.md generic and add testing.md [\#318](https://github.com/nodegit/nodegit/pull/318) ([maxkorp](https://github.com/maxkorp)) + +- Fix repo init ext [\#317](https://github.com/nodegit/nodegit/pull/317) ([maxkorp](https://github.com/maxkorp)) + +- Fix 313 generate scripts [\#315](https://github.com/nodegit/nodegit/pull/315) ([xinUmbralis](https://github.com/xinUmbralis)) + +- Fix \#311 [\#312](https://github.com/nodegit/nodegit/pull/312) ([johnhaley81](https://github.com/johnhaley81)) + +- Fix publishing [\#310](https://github.com/nodegit/nodegit/pull/310) ([maxkorp](https://github.com/maxkorp)) + +- detect node-webkit and build with nw-gyp [\#309](https://github.com/nodegit/nodegit/pull/309) ([maxkorp](https://github.com/maxkorp)) + +- fix signature.now and add signature tests [\#308](https://github.com/nodegit/nodegit/pull/308) ([maxkorp](https://github.com/maxkorp)) + +- move nodegit.js to a template to remove idefs dependency [\#303](https://github.com/nodegit/nodegit/pull/303) ([maxkorp](https://github.com/maxkorp)) + +- Fixed tag list and added a test for it [\#300](https://github.com/nodegit/nodegit/pull/300) ([johnhaley81](https://github.com/johnhaley81)) + +- Convenience methods [\#297](https://github.com/nodegit/nodegit/pull/297) ([johnhaley81](https://github.com/johnhaley81)) + +- Clean up the contents of the generate folder [\#296](https://github.com/nodegit/nodegit/pull/296) ([maxkorp](https://github.com/maxkorp)) + +- Styling [\#295](https://github.com/nodegit/nodegit/pull/295) ([maxkorp](https://github.com/maxkorp)) + +## v0.2.3 
[(2014-11-25)](https://github.com/nodegit/nodegit/tree/v0.2.3) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.2.2...v0.2.3) + +## v0.2.2 [(2014-11-25)](https://github.com/nodegit/nodegit/tree/v0.2.2) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.2.1...v0.2.2) + +**Merged pull requests:** + +- Moved some dependencies around to help the generate not fail [\#294](https://github.com/nodegit/nodegit/pull/294) ([johnhaley81](https://github.com/johnhaley81)) + +## v0.2.1 [(2014-11-25)](https://github.com/nodegit/nodegit/tree/v0.2.1) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.2.0...v0.2.1) + +**Merged pull requests:** + +- Rewrite installer [\#293](https://github.com/nodegit/nodegit/pull/293) ([johnhaley81](https://github.com/johnhaley81)) + +## v0.2.0 [(2014-11-25)](https://github.com/nodegit/nodegit/tree/v0.2.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.1.4...v0.2.0) + +**Closed issues:** + +- Find some way to automatically generate a list of missing tests. 
[\#272](https://github.com/nodegit/nodegit/issues/272) + +- libgit2 creation methods have name collisions with internal V8 functions [\#271](https://github.com/nodegit/nodegit/issues/271) + +- Enums are still being manually assigned in javascript [\#268](https://github.com/nodegit/nodegit/issues/268) + +- We're using too many promise libraries [\#264](https://github.com/nodegit/nodegit/issues/264) + +- unable to resolve symbolic references [\#262](https://github.com/nodegit/nodegit/issues/262) + +- nodegit installation falls back when Python install dir contains spaces [\#261](https://github.com/nodegit/nodegit/issues/261) + +- Probe features [\#245](https://github.com/nodegit/nodegit/issues/245) + +- require\('path'\).Repo.open\(...\) returns {} [\#241](https://github.com/nodegit/nodegit/issues/241) + +- RevWalk malloc error [\#239](https://github.com/nodegit/nodegit/issues/239) + +- OS X tests in Travis-CI [\#237](https://github.com/nodegit/nodegit/issues/237) + +- Fix RevWalk tests [\#236](https://github.com/nodegit/nodegit/issues/236) + +- Simple clone fails. [\#231](https://github.com/nodegit/nodegit/issues/231) + +- Create templates for remaining src and include files [\#230](https://github.com/nodegit/nodegit/issues/230) + +- Error: SSL is not supported by this copy of libgit2. [\#228](https://github.com/nodegit/nodegit/issues/228) + +- error while install nodegit latest version 0.1.4 [\#225](https://github.com/nodegit/nodegit/issues/225) + +- error while install nodegit latest version 0.1.4 [\#224](https://github.com/nodegit/nodegit/issues/224) + +- Did getReferences dissapear? 
[\#223](https://github.com/nodegit/nodegit/issues/223) + +- Again for \#147 [\#218](https://github.com/nodegit/nodegit/issues/218) + +- Update documentation on nodegit.org [\#217](https://github.com/nodegit/nodegit/issues/217) + +- Stable = bump to 1.0 [\#215](https://github.com/nodegit/nodegit/issues/215) + +- Example on nodegit.com homepage is invalid [\#211](https://github.com/nodegit/nodegit/issues/211) + +- tree.diffWorkDir deprecated? [\#209](https://github.com/nodegit/nodegit/issues/209) + +- Abort on getRemotes [\#201](https://github.com/nodegit/nodegit/issues/201) + +- Generic Logging/Tracing mechanism [\#199](https://github.com/nodegit/nodegit/issues/199) + +- Repo\#openIndex missing [\#197](https://github.com/nodegit/nodegit/issues/197) + +- Documentation on http://www.nodegit.org/ out of date [\#196](https://github.com/nodegit/nodegit/issues/196) + +- Remove extern "C" with 0.21 bump [\#193](https://github.com/nodegit/nodegit/issues/193) + +- CloneOptions documentation lacking [\#192](https://github.com/nodegit/nodegit/issues/192) + +- Webpage examples are not up to date [\#190](https://github.com/nodegit/nodegit/issues/190) + +- Automatically generate structs from types array [\#187](https://github.com/nodegit/nodegit/issues/187) + +- Error: connect ETIMEDOUT during install [\#179](https://github.com/nodegit/nodegit/issues/179) + +- TODO [\#177](https://github.com/nodegit/nodegit/issues/177) + +- Notes [\#176](https://github.com/nodegit/nodegit/issues/176) + +- Integration improvements. 
[\#171](https://github.com/nodegit/nodegit/issues/171) + +**Merged pull requests:** + +- add history.md and update readme [\#291](https://github.com/nodegit/nodegit/pull/291) ([maxkorp](https://github.com/maxkorp)) + +- Added tests for commit [\#290](https://github.com/nodegit/nodegit/pull/290) ([nkt](https://github.com/nkt)) + +- Added editor config [\#289](https://github.com/nodegit/nodegit/pull/289) ([nkt](https://github.com/nkt)) + +- \[WIP\] Push example [\#288](https://github.com/nodegit/nodegit/pull/288) ([johnhaley81](https://github.com/johnhaley81)) + +- \[WIP\] Better installation flow [\#287](https://github.com/nodegit/nodegit/pull/287) ([tbranyen](https://github.com/tbranyen)) + +- Merge methods and tests and examples [\#286](https://github.com/nodegit/nodegit/pull/286) ([maxkorp](https://github.com/maxkorp)) + +- Add details-for-tree-entry [\#285](https://github.com/nodegit/nodegit/pull/285) ([johnhaley81](https://github.com/johnhaley81)) + +- Add repo init example [\#284](https://github.com/nodegit/nodegit/pull/284) ([maxkorp](https://github.com/maxkorp)) + +- update add-and-commit example to include new paths [\#283](https://github.com/nodegit/nodegit/pull/283) ([maxkorp](https://github.com/maxkorp)) + +- General cleanup [\#282](https://github.com/nodegit/nodegit/pull/282) ([maxkorp](https://github.com/maxkorp)) + +- Added osx for testing on Travis [\#281](https://github.com/nodegit/nodegit/pull/281) ([johnhaley81](https://github.com/johnhaley81)) + +- Added " around python path to help fix issues with spaces in path [\#280](https://github.com/nodegit/nodegit/pull/280) ([johnhaley81](https://github.com/johnhaley81)) + +- Tests for branch class [\#279](https://github.com/nodegit/nodegit/pull/279) ([johnhaley81](https://github.com/johnhaley81)) + +- Exposes the NodeGit Promise implementation [\#278](https://github.com/nodegit/nodegit/pull/278) ([tbranyen](https://github.com/tbranyen)) + +- \[WIP\] Update examples 
[\#276](https://github.com/nodegit/nodegit/pull/276) ([johnhaley81](https://github.com/johnhaley81)) + +- Added script to generate missing tests [\#274](https://github.com/nodegit/nodegit/pull/274) ([johnhaley81](https://github.com/johnhaley81)) + +- Rename new [\#273](https://github.com/nodegit/nodegit/pull/273) ([maxkorp](https://github.com/maxkorp)) + +- MSBUILD doesn't allow an array of size 0 [\#270](https://github.com/nodegit/nodegit/pull/270) ([johnhaley81](https://github.com/johnhaley81)) + +- \[WIP\] generate enum definitions [\#269](https://github.com/nodegit/nodegit/pull/269) ([maxkorp](https://github.com/maxkorp)) + +- add Refs.nameToId and test [\#267](https://github.com/nodegit/nodegit/pull/267) ([maxkorp](https://github.com/maxkorp)) + +- voidcheck string pointers and reenable attr test [\#266](https://github.com/nodegit/nodegit/pull/266) ([maxkorp](https://github.com/maxkorp)) + +- require --documentation flag to include text in idefs [\#265](https://github.com/nodegit/nodegit/pull/265) ([maxkorp](https://github.com/maxkorp)) + +- Added ability for callbacks to poll promises for fulfillment value [\#260](https://github.com/nodegit/nodegit/pull/260) ([johnhaley81](https://github.com/johnhaley81)) + +- Generate nodegit from libgit2 docs and refactor descriptor [\#259](https://github.com/nodegit/nodegit/pull/259) ([johnhaley81](https://github.com/johnhaley81)) + +- Fix revwalk tests [\#258](https://github.com/nodegit/nodegit/pull/258) ([maxkorp](https://github.com/maxkorp)) + +- Bump to latest libgit2 [\#257](https://github.com/nodegit/nodegit/pull/257) ([tbranyen](https://github.com/tbranyen)) + +- Use Start-Process to start pageant.exe [\#254](https://github.com/nodegit/nodegit/pull/254) ([FeodorFitsner](https://github.com/FeodorFitsner)) + +- Adds in a broken unit test for \#109 [\#252](https://github.com/nodegit/nodegit/pull/252) ([tbranyen](https://github.com/tbranyen)) + +- Added more git\_cred methods 
[\#251](https://github.com/nodegit/nodegit/pull/251) ([johnhaley81](https://github.com/johnhaley81)) + +- Refactor classes [\#250](https://github.com/nodegit/nodegit/pull/250) ([maxkorp](https://github.com/maxkorp)) + +- Update Readme, to improve example code [\#248](https://github.com/nodegit/nodegit/pull/248) ([nmn](https://github.com/nmn)) + +- \[TEST\] Appveyor agent [\#247](https://github.com/nodegit/nodegit/pull/247) ([tbranyen](https://github.com/tbranyen)) + +- Refactor classes [\#246](https://github.com/nodegit/nodegit/pull/246) ([maxkorp](https://github.com/maxkorp)) + +- Buf methods [\#244](https://github.com/nodegit/nodegit/pull/244) ([tbranyen](https://github.com/tbranyen)) + +- Branch methods [\#243](https://github.com/nodegit/nodegit/pull/243) ([tbranyen](https://github.com/tbranyen)) + +- Blame methods [\#242](https://github.com/nodegit/nodegit/pull/242) ([tbranyen](https://github.com/tbranyen)) + +- Add revwalk.hide and revwalk.simplifyFirstParent [\#235](https://github.com/nodegit/nodegit/pull/235) ([tbranyen](https://github.com/tbranyen)) + +- Add revwalk.hide and revwalk.simplifyFirstParent [\#234](https://github.com/nodegit/nodegit/pull/234) ([orderedlist](https://github.com/orderedlist)) + +- Moved wrapper/copy out of include/src [\#233](https://github.com/nodegit/nodegit/pull/233) ([johnhaley81](https://github.com/johnhaley81)) + +- Removed ejs dependency [\#232](https://github.com/nodegit/nodegit/pull/232) ([johnhaley81](https://github.com/johnhaley81)) + +- Bump to latest libgit2 [\#229](https://github.com/nodegit/nodegit/pull/229) ([tbranyen](https://github.com/tbranyen)) + +- WIP: Refactor source generation templates from EJS to Combyne [\#227](https://github.com/nodegit/nodegit/pull/227) ([tbranyen](https://github.com/tbranyen)) + +- Test fixes [\#226](https://github.com/nodegit/nodegit/pull/226) ([johnhaley81](https://github.com/johnhaley81)) + +- Added new methods in checkout and repository 
[\#207](https://github.com/nodegit/nodegit/pull/207) ([tbranyen](https://github.com/tbranyen)) + +- Added additional remote methods [\#206](https://github.com/nodegit/nodegit/pull/206) ([tbranyen](https://github.com/tbranyen)) + +- Added git\_remote\_url and git\_remote\_load [\#205](https://github.com/nodegit/nodegit/pull/205) ([tbranyen](https://github.com/tbranyen)) + +- Add in remote listing support and test [\#204](https://github.com/nodegit/nodegit/pull/204) ([tbranyen](https://github.com/tbranyen)) + +- Attr methods [\#203](https://github.com/nodegit/nodegit/pull/203) ([tbranyen](https://github.com/tbranyen)) + +- Support latest libgit2 v0.21.0 [\#200](https://github.com/nodegit/nodegit/pull/200) ([tbranyen](https://github.com/tbranyen)) + +- Add Repo.openIndex [\#198](https://github.com/nodegit/nodegit/pull/198) ([tbranyen](https://github.com/tbranyen)) + +- Clone methods [\#195](https://github.com/nodegit/nodegit/pull/195) ([tbranyen](https://github.com/tbranyen)) + +- Remove all unused vendor directories [\#194](https://github.com/nodegit/nodegit/pull/194) ([tbranyen](https://github.com/tbranyen)) + +- \[WIP\] Mocha integration [\#189](https://github.com/nodegit/nodegit/pull/189) ([tbranyen](https://github.com/tbranyen)) + +- Auto gen structs [\#188](https://github.com/nodegit/nodegit/pull/188) ([tbranyen](https://github.com/tbranyen)) + +- Add in support for repository init ext [\#186](https://github.com/nodegit/nodegit/pull/186) ([tbranyen](https://github.com/tbranyen)) + +- moved libgit2 gyp to separate dir [\#184](https://github.com/nodegit/nodegit/pull/184) ([deepak1556](https://github.com/deepak1556)) + +- Remove all generated source code. [\#181](https://github.com/nodegit/nodegit/pull/181) ([tbranyen](https://github.com/tbranyen)) + +- Better installation flow for developing. 
[\#180](https://github.com/nodegit/nodegit/pull/180) ([tbranyen](https://github.com/tbranyen)) + +## v0.1.4 [(2014-06-13)](https://github.com/nodegit/nodegit/tree/v0.1.4) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.1.3...v0.1.4) + +**Closed issues:** + +- Redis Backend [\#173](https://github.com/nodegit/nodegit/issues/173) + +- using "Branch" object results in "undefined" error =\> branch.cc missing from binding.gyp? [\#166](https://github.com/nodegit/nodegit/issues/166) + +- Windows: Failure on install [\#158](https://github.com/nodegit/nodegit/issues/158) + +- Can't install v0.1.2 under OSX [\#155](https://github.com/nodegit/nodegit/issues/155) + +**Merged pull requests:** + +- \[WIP\] Prebuilt binaries. [\#178](https://github.com/nodegit/nodegit/pull/178) ([tbranyen](https://github.com/tbranyen)) + +- NodeJS v0.11.13 compatibility [\#175](https://github.com/nodegit/nodegit/pull/175) ([3y3](https://github.com/3y3)) + +- Fixed: "ReferenceError: error is not defined" [\#169](https://github.com/nodegit/nodegit/pull/169) ([danyshaanan](https://github.com/danyshaanan)) + +## v0.1.3 [(2014-05-02)](https://github.com/nodegit/nodegit/tree/v0.1.3) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.1.2...v0.1.3) + +**Merged pull requests:** + +- Fix erroneous OS detection for installation in OS X. 
[\#156](https://github.com/nodegit/nodegit/pull/156) ([tbranyen](https://github.com/tbranyen)) + +## v0.1.2 [(2014-05-02)](https://github.com/nodegit/nodegit/tree/v0.1.2) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.1.1...v0.1.2) + +**Closed issues:** + +- gyp ERR cannot find -lgit2 [\#150](https://github.com/nodegit/nodegit/issues/150) + +- Read file from git server [\#145](https://github.com/nodegit/nodegit/issues/145) + +- "emulate git log" example error [\#144](https://github.com/nodegit/nodegit/issues/144) + +- repo.workdir\(\) crashes \(SIGSEGV\) on a bare repo [\#128](https://github.com/nodegit/nodegit/issues/128) + +- How to create Branch using the API? [\#124](https://github.com/nodegit/nodegit/issues/124) + +- 'npm run-script gen && npm install' on Ubuntu 13.04 [\#122](https://github.com/nodegit/nodegit/issues/122) + +- Error while installing Nodegit 0.1.0 [\#120](https://github.com/nodegit/nodegit/issues/120) + +- Question: How would I implement the equivalent of `git status`? [\#117](https://github.com/nodegit/nodegit/issues/117) + +- Sync versions of all the methods [\#115](https://github.com/nodegit/nodegit/issues/115) + +- Tick version \# [\#107](https://github.com/nodegit/nodegit/issues/107) + +- Windows support [\#71](https://github.com/nodegit/nodegit/issues/71) + +- Create test for history with merge commits [\#64](https://github.com/nodegit/nodegit/issues/64) + +**Merged pull requests:** + +- Fixed OSX Directions [\#143](https://github.com/nodegit/nodegit/pull/143) ([nickpoorman](https://github.com/nickpoorman)) + +- Add ubuntu lib dependencies to the readme [\#141](https://github.com/nodegit/nodegit/pull/141) ([bigthyme](https://github.com/bigthyme)) + +- WIP New installer. 
[\#140](https://github.com/nodegit/nodegit/pull/140) ([tbranyen](https://github.com/tbranyen)) + +## v0.1.1 [(2014-03-23)](https://github.com/nodegit/nodegit/tree/v0.1.1) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.1.0...v0.1.1) + +**Closed issues:** + +- Misleading Readme [\#138](https://github.com/nodegit/nodegit/issues/138) + +- Cannot find module './build/Debug/nodegit' [\#137](https://github.com/nodegit/nodegit/issues/137) + +- Support for Node 0.11+ [\#134](https://github.com/nodegit/nodegit/issues/134) + +- installer cant seem to find python [\#126](https://github.com/nodegit/nodegit/issues/126) + +- Cannot build when parent directory contains space\(s\) [\#123](https://github.com/nodegit/nodegit/issues/123) + +- question: how cvv8 is used? [\#118](https://github.com/nodegit/nodegit/issues/118) + +- question: gen.js does not generate wrapper.h and wrapper.cc [\#116](https://github.com/nodegit/nodegit/issues/116) + +- tree.diffIndex: pointer being freed was not allocated [\#112](https://github.com/nodegit/nodegit/issues/112) + +- Use as a dependency of another node project? [\#110](https://github.com/nodegit/nodegit/issues/110) + +- Segmentation faults with concurrent access? [\#104](https://github.com/nodegit/nodegit/issues/104) + +- tree.diffWorkDir [\#101](https://github.com/nodegit/nodegit/issues/101) + +- getReference passes unexpected object into callback [\#98](https://github.com/nodegit/nodegit/issues/98) + +- index.removeByPath stops execution [\#97](https://github.com/nodegit/nodegit/issues/97) + +- Missing example: commit to a local repo \(i.e. git add, git commit\) [\#96](https://github.com/nodegit/nodegit/issues/96) + +- Get contents of index entry? 
[\#94](https://github.com/nodegit/nodegit/issues/94) + +- Failure to Build nodegit at Commit 0aa9a3c120 on OS X 10.6.8 [\#92](https://github.com/nodegit/nodegit/issues/92) + +- TypeError: Cannot call method 'clone' of undefined [\#91](https://github.com/nodegit/nodegit/issues/91) + +- missing cstring [\#88](https://github.com/nodegit/nodegit/issues/88) + +- Installing fails - can't find vendor/libgit2/build [\#80](https://github.com/nodegit/nodegit/issues/80) + +- Improving JavaScript API [\#73](https://github.com/nodegit/nodegit/issues/73) + +- Using code-generation to generate [\#70](https://github.com/nodegit/nodegit/issues/70) + +**Merged pull requests:** + +- Fix and improve testing. [\#139](https://github.com/nodegit/nodegit/pull/139) ([tbranyen](https://github.com/tbranyen)) + +- Support for Node 0.11+ [\#135](https://github.com/nodegit/nodegit/pull/135) ([pierreinglebert](https://github.com/pierreinglebert)) + +- Added git\_diff\_delta\_dup to git\_diff\_get\_patch to fix a memory issue whe... [\#113](https://github.com/nodegit/nodegit/pull/113) ([kmctown](https://github.com/kmctown)) + +- Try requiring build/Debug/nodegit if build/Release/nodegit wasn't found. [\#108](https://github.com/nodegit/nodegit/pull/108) ([papandreou](https://github.com/papandreou)) + +- Updated v0.18.0.json to make the index and DiffOptions arguments in Inde... [\#106](https://github.com/nodegit/nodegit/pull/106) ([kmctown](https://github.com/kmctown)) + +- Duplicate git\_error struct before passing it on [\#105](https://github.com/nodegit/nodegit/pull/105) ([papandreou](https://github.com/papandreou)) + +- Changed v0.18.0.json so diffWorkDir DiffOptions argument is optional. Ad... 
[\#103](https://github.com/nodegit/nodegit/pull/103) ([kmctown](https://github.com/kmctown)) + +- Reviewd and fixed examples [\#102](https://github.com/nodegit/nodegit/pull/102) ([micha149](https://github.com/micha149)) + +- cmake 2.8 is required to build nodegit [\#100](https://github.com/nodegit/nodegit/pull/100) ([dcolens](https://github.com/dcolens)) + +- new add-and-commit.js and remove-and-commit.js examples [\#99](https://github.com/nodegit/nodegit/pull/99) ([dcolens](https://github.com/dcolens)) + +- Add missing fields to index entry [\#95](https://github.com/nodegit/nodegit/pull/95) ([papandreou](https://github.com/papandreou)) + +- Made the tests pass and making each test self-contained [\#90](https://github.com/nodegit/nodegit/pull/90) ([FrozenCow](https://github.com/FrozenCow)) + +- Fixed compile error: memcpy not defined [\#89](https://github.com/nodegit/nodegit/pull/89) ([FrozenCow](https://github.com/FrozenCow)) + +- Add system dependencies for OSX install [\#82](https://github.com/nodegit/nodegit/pull/82) ([philschatz](https://github.com/philschatz)) + +## v0.1.0 [(2013-09-07)](https://github.com/nodegit/nodegit/tree/v0.1.0) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.79...v0.1.0) + +**Closed issues:** + +- The api in README is incorrect [\#87](https://github.com/nodegit/nodegit/issues/87) + +- message\_encoding in documentation for Repo.createCommit [\#86](https://github.com/nodegit/nodegit/issues/86) + +- How to retrieve blob with binary content? [\#83](https://github.com/nodegit/nodegit/issues/83) + +- Incorrect commit oid's when aggregated from commit.history\(\) [\#81](https://github.com/nodegit/nodegit/issues/81) + +- How do you list branches in repo? [\#76](https://github.com/nodegit/nodegit/issues/76) + +- License? 
[\#74](https://github.com/nodegit/nodegit/issues/74) + +- Nested walks scatter memory and cause SEGFAULTS [\#72](https://github.com/nodegit/nodegit/issues/72) + +- feature request: Provide fileMode / getType method on tree entries [\#67](https://github.com/nodegit/nodegit/issues/67) + +- Document DiffList [\#66](https://github.com/nodegit/nodegit/issues/66) + +- Procedure for moving development to nodegit/nodegit [\#55](https://github.com/nodegit/nodegit/issues/55) + +- Cannot install on OSX [\#49](https://github.com/nodegit/nodegit/issues/49) + +**Merged pull requests:** + +- Codegen [\#79](https://github.com/nodegit/nodegit/pull/79) ([nkallen](https://github.com/nkallen)) + +- Updated LICENSE to MIT [\#75](https://github.com/nodegit/nodegit/pull/75) ([tbranyen](https://github.com/tbranyen)) + +## v0.0.79 [(2013-04-05)](https://github.com/nodegit/nodegit/tree/v0.0.79) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.778...v0.0.79) + +**Closed issues:** + +- Clarify commit.history documentation [\#63](https://github.com/nodegit/nodegit/issues/63) + +- Python error on installing nodegit 0.0.77 [\#59](https://github.com/nodegit/nodegit/issues/59) + +## v0.0.778 [(2013-03-26)](https://github.com/nodegit/nodegit/tree/v0.0.778) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.77...v0.0.778) + +**Merged pull requests:** + +- See issue \#59 [\#60](https://github.com/nodegit/nodegit/pull/60) ([dctr](https://github.com/dctr)) + +## v0.0.77 [(2013-03-24)](https://github.com/nodegit/nodegit/tree/v0.0.77) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.76...v0.0.77) + +## v0.0.76 [(2013-03-24)](https://github.com/nodegit/nodegit/tree/v0.0.76) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.75...v0.0.76) + +## v0.0.75 [(2013-03-24)](https://github.com/nodegit/nodegit/tree/v0.0.75) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.74...v0.0.75) + +**Closed issues:** + +- incomplete 
error reporting [\#57](https://github.com/nodegit/nodegit/issues/57) + +- Segmentation Fault in raw-commit.js [\#56](https://github.com/nodegit/nodegit/issues/56) + +- Another Mac OSX install fail [\#53](https://github.com/nodegit/nodegit/issues/53) + +- unit tests broken in travis-ci [\#52](https://github.com/nodegit/nodegit/issues/52) + +- "Image not found" with require\("nodegit"\) on Mac OS X [\#51](https://github.com/nodegit/nodegit/issues/51) + +- Cannot Compile on 0.8.\* [\#47](https://github.com/nodegit/nodegit/issues/47) + +- No suitable image found. [\#46](https://github.com/nodegit/nodegit/issues/46) + +- Fails to require module on latest node version [\#43](https://github.com/nodegit/nodegit/issues/43) + +- Compilation error node 0.6.1 [\#32](https://github.com/nodegit/nodegit/issues/32) + +- commit.history work like slice [\#17](https://github.com/nodegit/nodegit/issues/17) + +- Sync and Async methods [\#16](https://github.com/nodegit/nodegit/issues/16) + +- Comment all code methods [\#1](https://github.com/nodegit/nodegit/issues/1) + +## v0.0.74 [(2013-03-21)](https://github.com/nodegit/nodegit/tree/v0.0.74) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.73...v0.0.74) + +## v0.0.73 [(2013-03-21)](https://github.com/nodegit/nodegit/tree/v0.0.73) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.72...v0.0.73) + +**Closed issues:** + +- pass through python flag to node-gyp [\#54](https://github.com/nodegit/nodegit/issues/54) + +- update package.json [\#28](https://github.com/nodegit/nodegit/issues/28) + +- Rewrite Notes [\#27](https://github.com/nodegit/nodegit/issues/27) + +- Tree each method is synchronous [\#15](https://github.com/nodegit/nodegit/issues/15) + +## v0.0.72 [(2013-03-06)](https://github.com/nodegit/nodegit/tree/v0.0.72) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.71...v0.0.72) + +## v0.0.71 [(2013-03-06)](https://github.com/nodegit/nodegit/tree/v0.0.71) + +[Full 
Changelog](https://github.com/nodegit/nodegit/compare/v0.0.6...v0.0.71) + +**Closed issues:** + +- Unable to load shared library [\#39](https://github.com/nodegit/nodegit/issues/39) + +- Expand Convenience Unit Tests [\#38](https://github.com/nodegit/nodegit/issues/38) + +- repo has no method 'branch' [\#35](https://github.com/nodegit/nodegit/issues/35) + +- update for node 0.5.9 [\#29](https://github.com/nodegit/nodegit/issues/29) + +**Merged pull requests:** + +- Converted from eio\_custom to uv\_queue\_work [\#48](https://github.com/nodegit/nodegit/pull/48) ([faceleg](https://github.com/faceleg)) + +- Fix Load-Order Bug [\#44](https://github.com/nodegit/nodegit/pull/44) ([fatlotus](https://github.com/fatlotus)) + +- Update documented commands needed to run tests [\#42](https://github.com/nodegit/nodegit/pull/42) ([shama](https://github.com/shama)) + +- Fix typo in README.md [\#41](https://github.com/nodegit/nodegit/pull/41) ([Skomski](https://github.com/Skomski)) + +- Issue 35: repo has no method 'branch' [\#40](https://github.com/nodegit/nodegit/pull/40) ([cholin](https://github.com/cholin)) + +- Refactor [\#37](https://github.com/nodegit/nodegit/pull/37) ([mmalecki](https://github.com/mmalecki)) + +## v0.0.6 [(2011-12-19)](https://github.com/nodegit/nodegit/tree/v0.0.6) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.4...v0.0.6) + +**Closed issues:** + +- commit event with undefined commit [\#26](https://github.com/nodegit/nodegit/issues/26) + +- Convenience methods are not convenience! 
[\#24](https://github.com/nodegit/nodegit/issues/24) + +**Merged pull requests:** + +- Node 0.6x fixes [\#34](https://github.com/nodegit/nodegit/pull/34) ([moneal](https://github.com/moneal)) + +## v0.0.4 [(2011-05-14)](https://github.com/nodegit/nodegit/tree/v0.0.4) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.3...v0.0.4) + +**Closed issues:** + +- repo.branch fails on empty repo [\#22](https://github.com/nodegit/nodegit/issues/22) + +- example/convenience-repo.js errors [\#21](https://github.com/nodegit/nodegit/issues/21) + +- Branch history each method is asynchronous [\#11](https://github.com/nodegit/nodegit/issues/11) + +## v0.0.3 [(2011-04-13)](https://github.com/nodegit/nodegit/tree/v0.0.3) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.2...v0.0.3) + +**Closed issues:** + +- error handling [\#18](https://github.com/nodegit/nodegit/issues/18) + +- Windows link issue [\#12](https://github.com/nodegit/nodegit/issues/12) + +## v0.0.2 [(2011-03-14)](https://github.com/nodegit/nodegit/tree/v0.0.2) + +[Full Changelog](https://github.com/nodegit/nodegit/compare/v0.0.1...v0.0.2) + +## v0.0.1 [(2011-03-10)](https://github.com/nodegit/nodegit/tree/v0.0.1) + + + +\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)* diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..52a55d148 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,27 @@ +Contribution Guidelines +----------------------- + +### A Note on Issues and Support ### + +We try to be available pretty often to help when problems come up. We like to split incoming questions +into two categories: potential bugs/features, and questions. If you want a feature added, or think you've found a bug +in the code (or in the examples), search the [issue tracker](https://github.com/nodegit/nodegit/issues) and if you don't +find anything, file a new issue. 
If you just have questions, instead of using issues, contact us in our [Gitter room](https://gitter.im/nodegit/nodegit). + +## How to Help ## + +NodeGit is iterating pretty quickly, but it can always go faster. We welcome help with the deeper darker parts, +like the templates and binding and more, but there are plenty of smaller things to do as well. +Things that are always needed: + - Filing issues (see above). + - Writing tests (See [here](https://github.com/nodegit/nodegit/blob/master/TESTING.md)). + - Writing examples. + +These are all good easy ways to start getting involved with the project. You can also look through the issue tracker +and see if you can help with any existing issues. Please comment with your intention and any questions before getting +started; duplicating work or doing something that would be rejected always sucks. + +Additionally, [the documentation](http://www.nodegit.org) needs some love. Get in touch with one of us on Gitter if +you'd like to lend a hand with that. + +For anything else, Gitter is probably the best way to get in touch as well. Happy coding, merge you soon! diff --git a/FAQ.md b/FAQ.md new file mode 100644 index 000000000..6d136b820 --- /dev/null +++ b/FAQ.md @@ -0,0 +1,13 @@ +NodeGit FAQ +----------- + +Feel free to add common problems with their solutions here, or just anything that wasn't clear at first. + +#### Error: callback returned unsupported credentials type #### + +As seen in nodegit/#959 -- some golang hackers have started to use the following stanza in .gitconfig to allow `go get` to work with private repos: +``` +[url "git@github.com:"] + insteadOf = https://github.com/ +``` +But if you do this, code can call `NodeGit.Clone.clone(url: 'https://foo')` and have the `authentication` callback be asked for **SSH** credentials instead of HTTPS ones, which might not be what your application expected. 
diff --git a/HISTORY.md b/HISTORY.md new file mode 100644 index 000000000..20061a36e --- /dev/null +++ b/HISTORY.md @@ -0,0 +1,9 @@ +0.2.0 / 2014-11-24 +=================== + + * Shifted to promises; asynchronous methods now provide promises. + * Lots of new methods exposed for merging, pushing, blaming and more. + * SSH transport now works. + * Switched generation of native module code from ejsg to Combyne. + * Added continuous integration with Linux, OSX, and Windows. + * Many method and property names have changed. diff --git a/README.md b/README.md index 6ee58cc9b..c4adec119 100644 --- a/README.md +++ b/README.md @@ -3,158 +3,158 @@ NodeGit > Node bindings to the [libgit2](http://libgit2.github.com/) project. -[![Build -Status](https://travis-ci.org/tbranyen/nodegit.png)](https://travis-ci.org/nodegit/nodegit) -Build Status: Windows - -**Stable: 0.1.2** - -Maintained by Tim Branyen [@tbranyen](http://twitter.com/tbranyen), Michael -Robinson [@codeofinterest](http://twitter.com/codeofinterest), and Nick Kallen -[@nk](http://twitter.com/nk), with help from [awesome -contributors](https://github.com/tbranyen/nodegit/contributors)! + + + + + + + + + + + + + + + + + + +
LinuxOS XWindowsCoverageDependencies
+ + + + + Coverage Status + + +
+ +**Stable (libgit2#master): 0.16.0** +**Stable (libgit2@0.24): 0.14.1** + +## Have a problem? Come chat with us! ## + +https://libgit2.slack.com/ + +## Maintained by ## +Tim Branyen [@tbranyen](http://twitter.com/tbranyen), +John Haley [@johnhaley81](http://twitter.com/johnhaley81), and +Max Korp [@maxkorp](http://twitter.com/MaximilianoKorp) with help from tons of +[awesome contributors](https://github.com/nodegit/nodegit/contributors)! + +### Alumni Maintainers ### +Steve Smith [@orderedlist](https://twitter.com/orderedlist), +Michael Robinson [@codeofinterest](http://twitter.com/codeofinterest), and +Nick Kallen [@nk](http://twitter.com/nk) ## API Documentation. ## -http://www.nodegit.org/nodegit/ +[http://www.nodegit.org/](http://www.nodegit.org/) -## Building and Installing. ## +## Getting started. ## -Minimum dependencies: - -* [Python 2](https://www.python.org/) -* [CMake >= 2.8](http://www.cmake.org/) +NodeGit will work on most systems out-of-the-box without any native +dependencies. ``` bash npm install nodegit ``` -### Building manually: ### - -If you wish to help contribute to nodegit it is useful to build locally. - -``` bash -# Fetch this project. -git clone git://github.com/tbranyen/nodegit.git - -# Enter the repository. -cd nodegit - -# Install the template engine, run the code generation script, and install. -npm install ejs && npm run codegen && npm install -``` - -If you encounter errors, you most likely have not configured the dependencies -correctly. - -### Installing dependencies: ### - -#### OS X #### +If you receive errors about libstdc++, which are commonly experienced when +building on Travis-CI, you can fix this by upgrading to the latest +libstdc++-4.9. 
-Using Brew: +In Ubuntu: -``` bash -brew install cmake libzip +``` sh +sudo add-apt-repository ppa:ubuntu-toolchain-r/test +sudo apt-get update +sudo apt-get install libstdc++-4.9-dev ``` -#### Linux #### - -Using APT in Ubuntu: +In Travis: -``` bash -sudo apt-get install cmake libzip-dev build-essential +``` yaml +addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - libstdc++-4.9-dev ``` -Using Pacman in Arch Linux: +In CircleCI: -``` bash -sudo pacman -S cmake libzip base-devel +``` yaml + dependencies: + pre: + - sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test + - sudo apt-get update + - sudo apt-get install -y libstdc++-4.9-dev ``` -#### Windows #### - -- [Download and install CMake](http://www.cmake.org/cmake/resources/software.html). -- [Download and install Python](https://www.python.org/download/windows). -- [Download and install VS Express](http://www.visualstudio.com/downloads/download-visual-studio-vs#d-express-windows-desktop). - -You may have to add a build flag to the installation process to successfully -install. Try first without, if the build fails, try again with the flag. - -*Allegedly the order in which you install Visual Studio could trigger this -error.* - -``` bash -npm install nodegit --msvs_version=2013 -# Or whatever version you've installed. -``` +If you are still encountering problems while installing, you should try the +[Building from source](http://www.nodegit.org/guides/install/from-source/) +instructions. ## API examples. ## ### Cloning a repository and reading a file: ### ``` javascript -var clone = require("nodegit").Repo.clone; - -// Clone a given repository into a specific folder. -clone("https://github.com/nodegit/nodegit", "tmp", null, function(err, repo) { - if (err) { - throw err; - } - - // Use a known commit sha from this repository. - var sha = "59b20b8d5c6ff8d09518454d4dd8b7b30f095ab5"; +var Git = require("nodegit"); +// Clone a given repository into the `./tmp` folder. 
+Git.Clone("https://github.com/nodegit/nodegit", "./tmp") // Look up this known commit. - repo.getCommit(sha, function(err, commit) { - if (err) { - throw error; - } - - // Look up a specific file within that commit. - commit.getEntry("README.md", function(err, entry) { - if (err) { - throw error; - } - - // Get the blob contents from the file. - entry.getBlob(function(err, blob) { - if (err) { - throw err; - } + .then(function(repo) { + // Use a known commit sha from this repository. + return repo.getCommit("59b20b8d5c6ff8d09518454d4dd8b7b30f095ab5"); + }) + // Look up a specific file within that commit. + .then(function(commit) { + return commit.getEntry("README.md"); + }) + // Get the blob contents from the file. + .then(function(entry) { + // Patch the blob to contain a reference to the entry. + return entry.getBlob().then(function(blob) { + blob.entry = entry; + return blob; + }); + }) + // Display information about the blob. + .then(function(blob) { + // Show the path, sha, and filesize in bytes. + console.log(blob.entry.path() + blob.entry.sha() + blob.rawsize() + "b"); - // Show the name, sha, and filesize in byes. - console.log(entry.name() + entry.sha() + blob.size() + "b"); + // Show a spacer. + console.log(Array(72).join("=") + "\n\n"); - // Show a spacer. - console.log(Array(72).join("=") + "\n\n"); + // Show the entire file. + console.log(String(blob)); + }) + .catch(function(err) { console.log(err); }); - // Show the entire file. - console.log(String(blob)); - }); - }); - }); -}); ``` ### Emulating git log: ### ``` javascript -var open = require("nodegit").Repo.open; +var Git = require("nodegit"); // Open the repository directory. -open("tmp", function(err, repo) { - if (err) { - throw err; - } - +Git.Repository.open("tmp") // Open the master branch. - repo.getMaster(function(err, branch) { - if (err) { - throw err; - } - + .then(function(repo) { + return repo.getMasterCommit(); + }) + // Display information about commits on master. 
+ .then(function(firstCommitOnMaster) { // Create a new history event emitter. - var history = branch.history(); + var history = firstCommitOnMaster.history(); // Create a counter to only show up to 9 entries. var count = 0; @@ -173,7 +173,7 @@ open("tmp", function(err, repo) { var author = commit.author(); // Display author information. - console.log("Author:\t" + author.name() + " <", author.email() + ">"); + console.log("Author:\t" + author.name() + " <" + author.email() + ">"); // Show the commit date. console.log("Date:\t" + commit.date()); @@ -185,9 +185,10 @@ open("tmp", function(err, repo) { // Start emitting events. history.start(); }); -}); ``` +For more examples, check the `examples/` folder. + ## Unit tests. ## You will need to build locally before running the tests. See above. diff --git a/TESTING.md b/TESTING.md new file mode 100644 index 000000000..348bb8ed3 --- /dev/null +++ b/TESTING.md @@ -0,0 +1,85 @@ +Test Contribution Guidelines +---------------------------- + +This is a guide on how to contribute test cases to help with coverage testing for NodeGit. + +## Getting Started ## + +Currently there are a number of fields and functions in NodeGit that have no tests at all. A list of which files are missing and what fields and functions need tests can be generated by running + +``` bash +npm run generateMissingTests +``` + +>You should have run already `npm install .` or it will complain about +> missing `nodegit-promise` or suchlike + +This will make the file `generate/output/missing-tests.json` which will contain info for tests or files that are currently missing. + +From this file you can find fields and functions that don't have any tests yet and pick one to work on. + +## Adding a test ## + +After you find a test that's missing the next step is to find or add the file that you need to add it into. You can always use other tests in the directory as a guide for writing more. All new files will be automatically added during a test run. 
+ +In the `missing-tests.json` file you'll see it formatted like so: + +```json +{ + "{className}":{ + "fields": [], + "functions": [] + } +} +``` + +In the file each `{className}` corresponds to a file found at `test/tests/{classname}`. Each entry in either `fields` or `functions` is a missing test for the respective field/function. + +In the file that your test is going in you can just append it to the file inside the `describe` function block. + +It can be helpful to reference the [libgit2 API docs](https://libgit2.github.com/libgit2/#v0.21.4) to know what the field or function is doing inside of libgit2 and referencing the [NodeGit API docs](http://www.nodegit.org/) can also help. Looking at examples inside of `/examples` can show you how we wrap the libgit2 library and how you can call into it from JavaScript. + +The idea is to test the basic functionality of the field/function and to confirm that it's returning or setting the value(s) correctly. Bugs inside of libgit2 will have to either have a work-around or be ignored. + +If a specific field or function is further wrapped via a file inside +of `/lib` then as long as that wrapper is called and tested it is OK. + +You can mark something to be ignored inside of the `/generate/missing-tests-ignore.json` file. + +After you write your test make sure to run `npm run generateMissingTests` again to confirm that the field/function that a test was written for no longer shows up. + +## Test results ## + +### The test passes ### + +Excellent!! Make sure that the test is working correctly and testing what you're expecting it to test and then move onto the [next section](https://github.com/nodegit/nodegit/tree/master/test#making-a-pull-request). + +### The test fails ### + +This is also great! You just found something that wasn't properly covered in our generate scripts for wrapping libgit2. We'll have to further analyze what's going on and figure out how to fix it. 
+ +For bonus points you could also include a fix in your pull request but that step is optional. + +## Making a pull request ## + +So you made your self a new test for NodeGit and now you want to add it to the main repo? That's great! We'll try and make the process as simple and easy as possible for you. + +So assuming that you have a fork of the repo make a new branch that's labeled `new-tests-{className}` where {className} is the name of the file you added the tests to. Also, make sure you check the [main repo's pull request list](https://github.com/nodegit/nodegit/pulls) and see if somebody else is editing that file before you make your PR. They might have added a test already that's waiting to get merged in. + +So after you have your branch and your change is ready to go make sure your subjects for your commits contain the {className} of the tests you added and then list each new field/function being tested inside of the subject of the commit message. + +Example: + +``` +Added tests for oid + +fromString +allocfmt +inspect +``` + +This will help us know what each commit contains at a glance and should expedite merging your pull request. + +If your test is failing, TravisCI should pick it up and note it on the PR. PR's that add failing tests will have to be handled on a case-by-case basis but please don't let that stop you from staring a PR. + +Please don't start a PR until you're finished (no WIP test PRs please!). 
diff --git a/appveyor.yml b/appveyor.yml index 5dfc10162..da168034d 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,36 +1,63 @@ # appveyor file # http://www.appveyor.com/docs/appveyor-yml -project_id: "e5a5q75l9yfhnfv2" + +os: Windows Server 2012 R2 +image: Visual Studio 2015 + +platform: + - x86 + - x64 # build version format version: "{build}" +# Set a known clone folder +clone_folder: c:\projects\nodegit + # fix lineendings in Windows init: - git config --global core.autocrlf input + - git config --global user.name "John Doe" + - git config --global user.email johndoe@example.com # what combinations to test environment: + JOBS: 4 + GIT_SSH: c:\projects\nodegit\vendor\plink.exe + GYP_MSVS_VERSION: 2013 matrix: - - nodejs_version: 0.11 - - nodejs_version: 0.10 - - nodejs_version: 0.8 + # Node.js + - nodejs_version: "0.12" + - nodejs_version: "4.5" + - nodejs_version: "5.12" + - nodejs_version: "6.5" matrix: - allow_failures: - - nodejs_version: 0.11 + fast_finish: true # Get the latest stable version of Node 0.STABLE.latest install: - - ps: Update-NodeJsInstallation (Get-NodeJsLatestBuild $env:nodejs_version) - - cmd: SET PATH=C:\Program Files (x86)\MSBuild\12.0\bin\;%PATH% - - cmd: SET PATH=c:\python27;%PATH% + - ps: Install-Product node $env:nodejs_version $env:platform + - ps: Start-Process c:\projects\nodegit\vendor\pageant.exe c:\projects\nodegit\vendor\private.ppk + - npm install -g npm - cmd: npm install -g node-gyp - - npm install --msvs_version=2013 + - npm install test_script: - node --version - npm --version - cmd: npm test +on_success: + - IF %APPVEYOR_REPO_TAG%==true npm install -g node-pre-gyp + - IF %APPVEYOR_REPO_TAG%==true npm install -g aws-sdk + - IF %APPVEYOR_REPO_TAG%==true node lifecycleScripts\clean + - IF %APPVEYOR_REPO_TAG%==true node-pre-gyp package + - IF %APPVEYOR_REPO_TAG%==true node-pre-gyp publish + build: off + +branches: + only: + - master + - v0.3 diff --git a/binding.gyp b/binding.gyp deleted file mode 100644 index 
6233c2826..000000000 --- a/binding.gyp +++ /dev/null @@ -1,80 +0,0 @@ -{ - "targets": [ - { - "target_name": "nodegit", - - "sources": [ - "src/base.cc", - "src/blob.cc", - "src/commit.cc", - "src/oid.cc", - "src/reference.cc", - "src/object.cc", - "src/repo.cc", - "src/index.cc", - "src/index_entry.cc", - "src/index_time.cc", - "src/tag.cc", - "src/revwalk.cc", - "src/signature.cc", - "src/time.cc", - "src/tree.cc", - "src/tree_builder.cc", - "src/tree_entry.cc", - "src/diff_find_options.cc", - "src/diff_options.cc", - "src/diff_list.cc", - "src/patch.cc", - "src/delta.cc", - "src/diff_file.cc", - "src/diff_range.cc", - "src/threads.cc", - "src/wrapper.cc", - "src/refdb.cc", - "src/odb_object.cc", - "src/odb.cc", - "src/submodule.cc", - "src/remote.cc", - "src/clone_options.cc", - "src/functions/copy.cc", - ], - - "include_dirs": [ - "vendor/libv8-convert", - "vendor/libgit2/include", - " - */ -Handle <%- cppClassName %>::<%- functionInfo.cppFunctionName %>(const Arguments& args) { - HandleScope scope; - <% var jsArg; -%> - <% include guardArguments.cc.ejs -%> - - if (args.Length() == <%- jsArg %> || !args[<%- jsArg %>]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - <%- functionInfo.cppFunctionName %>Baton* baton = new <%- functionInfo.cppFunctionName %>Baton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; -<% - for (var cArg = 0, jsArg = 0; cArg < functionInfo.args.length; cArg++) { - var arg = functionInfo.args[cArg]; --%> -<% if (!arg.isReturn) { -%> -<% if (arg.isSelf) { -%> - baton-><%- arg.name %>Reference = Persistent::New(args.This()); - baton-><%- arg.name %> = ObjectWrap::Unwrap<<%- cppClassName %>>(args.This())->GetValue(); -<% } else { -%> - baton-><%- arg.name %>Reference = Persistent::New(args[<%- jsArg %>]); - <% include convertFromV8.cc.ejs -%> - <% if (!arg.isPayload) { -%> - baton-><%- arg.name %> = from_<%- arg.name %>; - <% } 
-%> -<% } -%> -<% if (!(arg.isReturn || arg.isSelf || arg.isPayload)) jsArg++; -%> -<% } else { -%> -<% if (arg.shouldAlloc) { -%> - baton-><%- arg.name %> = (<%- arg.cType %>)malloc(sizeof(<%- arg.cType.replace('*', '') %>)); -<% } else { -%> -<% } -%> -<% } -%> -<% } -%> - baton->callback = Persistent::New(Local::Cast(args[<%- jsArg %>])); - - uv_queue_work(uv_default_loop(), &baton->request, <%- functionInfo.cppFunctionName %>Work, (uv_after_work_cb)<%- functionInfo.cppFunctionName %>AfterWork); - - return Undefined(); -} - -void <%- cppClassName %>::<%- functionInfo.cppFunctionName %>Work(uv_work_t *req) { - <%- functionInfo.cppFunctionName %>Baton *baton = static_cast<<%- functionInfo.cppFunctionName %>Baton *>(req->data); - <% if (functionInfo.return.cType != "void" || functionInfo.return.isErrorCode) { %><%- functionInfo.return.cType %> result = <% } %><%- functionInfo.cFunctionName %>( -<% - for (var i = 0; i < functionInfo.args.length; i++) { - var arg = functionInfo.args[i]; --%> - <% if (arg.isReturn && /\*\*/.test(arg.cType)) { %>&<% } %>baton-><%- arg.name %><% if (i < functionInfo.args.length - 1) { %>, <% } %> -<% } -%> - ); -<% if (functionInfo.return.isErrorCode) { -%> - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -<% } else if (functionInfo.return.cType != "void") { -%> - baton->result = result; -<% } -%> -} - -void <%- cppClassName %>::<%- functionInfo.cppFunctionName %>AfterWork(uv_work_t *req) { - HandleScope scope; - <%- functionInfo.cppFunctionName %>Baton *baton = static_cast<<%- functionInfo.cppFunctionName %>Baton *>(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { -<% if (!returns.length) { -%> - Handle result = Local::New(Undefined()); -<% } else if (returns.length == 1) { -%> -<% var to = {}; to.__proto__ = returns[0]; to.name = "baton->" + to.name; -%> - Handle to; - <% include convertToV8.cc.ejs -%> - Handle result = to; -<% } 
else { -%> - Handle result = Object::New(); - Handle to; -<% - for (r in returns) { - var to = returns[r]; --%> - <% include convertToV8.cc.ejs -%> - result->Set(String::NewSymbol("<%- to.jsName || to.name %>"), to); -<% } -%> -<% } -%> - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - <% - for (var i = 0; i < functionInfo.args.length; i++) { - var arg = functionInfo.args[i]; - if (!arg.shouldAlloc) continue; - -%> - free(baton-><%= arg.name %>); - <% } -%> - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } -<% - for (var i = 0, j = 0; i < functionInfo.args.length; i++) { - var arg = functionInfo.args[i]; - if (arg.isReturn) continue; --%> - baton-><%- arg.name %>Reference.Dispose(); -<% } -%> - baton->callback.Dispose(); -<% - for (var i = 0; i < functionInfo.args.length; i++) { - var arg = functionInfo.args[i]; --%> -<% if (['String', 'Array'].indexOf(arg.cppClassName) > -1) { -%> -<% if (arg.freeFunctionName) { %> - <%- arg.freeFunctionName %>(baton-><%- arg.name %>); -<% } else { -%> - free((void *)baton-><%- arg.name %>); -<% } -%> -<% } -%> -<% } -%> - delete baton; -} diff --git a/build/codegen/templates/class.cc.ejs b/build/codegen/templates/class.cc.ejs deleted file mode 100644 index 4cce71562..000000000 --- a/build/codegen/templates/class.cc.ejs +++ /dev/null @@ -1,161 +0,0 @@ -<% - function isV8Value(cppClassName) { - return ["Boolean", "Number", "String", "Integer", "Int32", "Uint32", "Date", "Function"].indexOf(cppClassName) > -1; - } - - function cppClassName2v8ValueClassName(cppClassName) { - if 
(isV8Value(cppClassName)) - return cppClassName; - else - return 'Object'; - } - - function isPointer(cType) { - return /\s*\*\s*$/.test(cType); - } - - function unPointer(cType) { - return cType.replace(/\s*\*\s*$/,''); - } - - function defaultValue(cType) { - if (cType === 'git_otype') { return 'GIT_OBJ_ANY'; } - else { return '0'; } - } --%> -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/<%= filename %>" -<% if (typeof dependencies != 'undefined') { -%> -<% for (d in dependencies) { -%> -#include "<%- dependencies[d] %>" -<% } -%> -<% } -%> - -using namespace v8; -using namespace node; - -<% if (typeof cType != 'undefined') { -%> -<%- cppClassName %>::<%- cppClassName %>(<%- cType %> *raw) { - this->raw = raw; -} - -<%- cppClassName %>::~<%- cppClassName %>() { -<% if (typeof freeFunctionName != 'undefined') { -%> - <%- freeFunctionName %>(this->raw); -<% } -%> -} - -void <%- cppClassName %>::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("<%- jsClassName %>")); - -<% if (typeof functions != 'undefined') { -%> -<% - for (var i in functions) { - var functionInfo = functions[i]; - if (functionInfo.ignore) continue; --%> -<% if (functionInfo.isPrototypeMethod) { -%> - NODE_SET_PROTOTYPE_METHOD(tpl, "<%- functionInfo.jsFunctionName %>", <%- functionInfo.cppFunctionName %>); -<% } else { -%> - NODE_SET_METHOD(tpl, "<%- functionInfo.jsFunctionName %>", <%- functionInfo.cppFunctionName %>); -<% } -%> -<% } -%> -<% } -%> - -<% if (typeof fields != 'undefined') { -%> -<% - for (var i in fields) { - var fieldInfo = fields[i]; - if (fieldInfo.ignore) continue; --%> - NODE_SET_PROTOTYPE_METHOD(tpl, "<%- fieldInfo.jsFunctionName %>", <%- fieldInfo.cppFunctionName %>); -<% } -%> 
-<% } -%> - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("<%- jsClassName %>"), constructor_template); -} - -Handle <%- cppClassName %>::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("<%= cType %> is required."))); - } - - <%- cppClassName %>* object = new <%- cppClassName %>((<%= cType%> *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle <%- cppClassName %>::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(<%- cppClassName %>::constructor_template->NewInstance(1, argv)); -} - -<%- cType %> *<%- cppClassName %>::GetValue() { - return this->raw; -} -<% } else { -%> -void <%- cppClassName %>::Initialize(Handle target) { - HandleScope scope; - - Persistent object = Persistent::New(Object::New()); - -<% if (typeof functions != 'undefined') { -%> -<% - for (var i in functions) { - var functionInfo = functions[i]; - if (functionInfo.ignore) continue; --%> - object->Set(String::NewSymbol("<%- functionInfo.jsFunctionName %>"), FunctionTemplate::New(<%- functionInfo.cppFunctionName %>)->GetFunction()); -<% } -%> -<% } -%> - - target->Set(String::NewSymbol("<%- jsClassName %>"), object); -} -<% } -%> - -<% if (typeof functions != 'undefined') { -%> -<% - for (var i in functions) { - var functionInfo = functions[i]; - if (functionInfo.ignore) continue; - - var returns = []; - for (var i = 0; i < functionInfo.args.length; i++) { - var arg = functionInfo.args[i]; - if (arg.isReturn) returns.push(arg); - } - if (!returns.length && !functionInfo.return.isErrorCode && functionInfo.return.cType != "void") returns.push(functionInfo.return); --%> - -<% if (functionInfo.isAsync) { -%> -<% include build/codegen/templates/asyncFunction.cc.ejs -%> -<% } else { -%> -<% include 
build/codegen/templates/syncFunction.cc.ejs -%> -<% } -%> -<% } -%> -<% } -%> -<% include build/codegen/templates/fields.cc.ejs -%> - -<% if (typeof cType != 'undefined') { -%> -Persistent <%- cppClassName %>::constructor_template; -<% } -%> diff --git a/build/codegen/templates/convertFromV8.cc.ejs b/build/codegen/templates/convertFromV8.cc.ejs deleted file mode 100644 index d1a442e34..000000000 --- a/build/codegen/templates/convertFromV8.cc.ejs +++ /dev/null @@ -1,32 +0,0 @@ -<% if (!arg.isPayload) { -%> - <%- arg.cType %> from_<%- arg.name %>; - <% if (arg.isOptional) { -%> - if (args[<%- jsArg %>]->Is<%- cppClassName2v8ValueClassName(arg.cppClassName) %>()) { - <% } -%> - <% if (arg.cppClassName == 'String') { -%> - String::Utf8Value <%- arg.name %>(args[<%- jsArg %>]->ToString()); - from_<%- arg.name %> = strdup(*<%- arg.name %>); - <% } else if (arg.cppClassName == 'Array') { -%> - Array *tmp_<%- arg.name %> = Array::Cast(*args[<%- jsArg %>]); - from_<%- arg.name %> = (<%- arg.cType %>)malloc(tmp_<%- arg.name %>->Length() * sizeof(<%- arg.cType.replace('**', '*') %>)); - for (unsigned int i = 0; i < tmp_<%- arg.name %>->Length(); i++) { - <% - // FIXME: should recursively call convertFromv8. 
- %> - from_<%- arg.name %>[i] = ObjectWrap::Unwrap<<%- arg.arrayElementCppClassName %>>(tmp_<%- arg.name %>->Get(Number::New(static_cast(i)))->ToObject())->GetValue(); - } - <% } else if (arg.cppClassName == "Function") { -%> - Persistent::New(Local::Cast(args[<%- jsArg %>])); - <% } else if (arg.cppClassName == 'Buffer') { -%> - from_<%- arg.name %> = Buffer::Data(args[<%- jsArg %>]->ToObject()); - <% } else if (isV8Value(arg.cppClassName)) { -%> - from_<%- arg.name %> = (<%- arg.cType %>) <%- arg.additionalCast %> <%- arg.cast %> args[<%- jsArg %>]->To<%- arg.cppClassName %>()->Value(); - <% } else { -%> - from_<%- arg.name %> = ObjectWrap::Unwrap<<%- arg.cppClassName %>>(args[<%- jsArg %>]->ToObject())->GetValue(); - <% } -%> - <% if (arg.isOptional) { -%> - } else { - from_<%- arg.name %> = 0; - } - <% } -%> -<% } -%> diff --git a/build/codegen/templates/convertToV8.cc.ejs b/build/codegen/templates/convertToV8.cc.ejs deleted file mode 100644 index 9918d948d..000000000 --- a/build/codegen/templates/convertToV8.cc.ejs +++ /dev/null @@ -1,35 +0,0 @@ -<% toName = to.name || 'result' -%> -<% if (to.cppClassName == "String") { -%> -<% if (typeof to.size != 'undefined') { -%> - to = String::New(<%- toName %>, <%- to.size %>); -<% } else { -%> - to = String::New(<%- toName %>); -<% } -%> -<% if (to.freeFunctionName) { -%> - <%- to.freeFunctionName %>(<%- toName %>); -<% } -%> -<% } else if (isV8Value(to.cppClassName)) { -%> - to = <%- to.cppClassName %>::New(<%- toName %>); -<% } else if (to.cppClassName == "External") { -%> - to = External::New((void *)<%- toName %>); -<% } else if (to.cppClassName == 'Array') { -%> -<% - // FIXME this is not general purpose enough. 
-%> - Local tmpArray = Array::New(<%- toName %>-><%- to.size %>); - for (unsigned int i = 0; i < <%- toName %>-><%- to.size %>; i++) { - tmpArray->Set(Number::New(i), String::New(<%- toName %>-><%- to.key %>[i])); - } - to = tmpArray; -<% } else { -%> -<% if (to.copy) { -%> - if (<%- toName %> != NULL) { - <%- toName %> = (<%- to.cType.replace('**', '*') %> <% if (!/\*/.test(to.cType)) {%>*<% } %>)<%- to.copy %>(<%- toName %>); - } -<% } -%> - if (<%- toName %> != NULL) { - to = <%- to.cppClassName %>::New((void *)<%- toName %>); - } else { - to = Null(); - } -<% } -%> diff --git a/build/codegen/templates/doc.cc.ejs b/build/codegen/templates/doc.cc.ejs deleted file mode 100644 index 44ca0979a..000000000 --- a/build/codegen/templates/doc.cc.ejs +++ /dev/null @@ -1,14 +0,0 @@ -<% - for (var i = 0; i < functionInfo.args.length; i++) { - var arg = functionInfo.args[i]; - if (arg.isReturn || arg.isSelf) continue; --%> - * @param {<%- arg.jsClassName %>} <%- arg.name %> -<% } -%> -<% for (var r = 0; r < returns.length; r++) { -%> -<% if (functionInfo.isAsync) { -%> - * @param {<%- returns[r].jsClassName || returns[r].cppClassName %>} callback -<% } else { -%> - * @return {<%- returns[r].jsClassName || returns[r].cppClassName %>} <%- returns[r].name || 'result' %> -<% } -%> -<% } -%> diff --git a/build/codegen/templates/fields.cc.ejs b/build/codegen/templates/fields.cc.ejs deleted file mode 100644 index 44c8fcb57..000000000 --- a/build/codegen/templates/fields.cc.ejs +++ /dev/null @@ -1,20 +0,0 @@ -<% if (typeof fields != 'undefined') { -%> -<% - for (var i in fields) { - var fieldInfo = fields[i]; - if (fieldInfo.ignore) continue; --%> - -Handle <%- cppClassName %>::<%- fieldInfo.cppFunctionName %>(const Arguments& args) { - HandleScope scope; - <% var to = fieldInfo; -%> - Handle to; - - <%- fieldInfo.cType %> <% if (!isV8Value(fieldInfo.cppClassName)) { %>*<% } %><%- fieldInfo.name %> = - <% if (!isV8Value(fieldInfo.cppClassName)) { %>&<% } %>ObjectWrap::Unwrap<<%- 
cppClassName %>>(args.This())->GetValue()-><%- fieldInfo.name %>; - - <% include convertToV8.cc.ejs -%> - return scope.Close(to); -} -<% } -%> -<% } -%> diff --git a/build/codegen/templates/guardArguments.cc.ejs b/build/codegen/templates/guardArguments.cc.ejs deleted file mode 100644 index a238d32e9..000000000 --- a/build/codegen/templates/guardArguments.cc.ejs +++ /dev/null @@ -1,13 +0,0 @@ -<% - var cArg = 0; - for (cArg = 0, jsArg = 0; cArg < functionInfo.args.length; cArg++) { - var arg = functionInfo.args[cArg]; - if (arg.isReturn || arg.isSelf || arg.isPayload) continue; --%> -<% if (!arg.isOptional) { -%> - if (args.Length() == <%- jsArg %> || !args[<%- jsArg %>]->Is<%- cppClassName2v8ValueClassName(arg.cppClassName) %>()) { - return ThrowException(Exception::Error(String::New("<%- arg.jsClassName %> <%- arg.name %> is required."))); - } -<% } -%> -<% jsArg++; -%> -<% } -%> diff --git a/build/codegen/templates/header.h.ejs b/build/codegen/templates/header.h.ejs deleted file mode 100644 index 1a31a7fe7..000000000 --- a/build/codegen/templates/header.h.ejs +++ /dev/null @@ -1,83 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef <%- cppClassName.toUpperCase() %>_H -#define <%- cppClassName.toUpperCase() %>_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class <%- cppClassName %> : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - -<% if (typeof cType != 'undefined') { -%> - <%- cType %> *GetValue(); - - static Handle New(void *raw); -<% } -%> - - private: -<% if (typeof cType != 'undefined') { -%> - <%- cppClassName %>(<%- cType %> *raw); - ~<%- cppClassName %>(); -<% } -%> - - static Handle New(const Arguments& args); - -<% if (typeof fields != 'undefined') { -%> -<% - for (var i in fields) { - var fieldInfo = fields[i]; - if (fieldInfo.ignore) continue; --%> - static Handle <%- fieldInfo.cppFunctionName %>(const Arguments& args); -<% } -%> -<% } -%> - -<% if (typeof functions != 'undefined') { -%> -<% - for (var i in functions) { - var functionInfo = functions[i]; - if (functionInfo.ignore) continue; --%> - static Handle <%- functionInfo.cppFunctionName %>(const Arguments& args); -<% if (functionInfo.isAsync) { -%> - static void <%- functionInfo.cppFunctionName %>Work(uv_work_t* req); - static void <%- functionInfo.cppFunctionName %>AfterWork(uv_work_t* req); - - struct <%- functionInfo.cppFunctionName %>Baton { - uv_work_t request; - int error_code; - const git_error* error; -<% - for (var i = 0; i < functionInfo.args.length; i++) { - var arg = functionInfo.args[i]; --%> -<% if (arg.isReturn) { -%> - <%- arg.cType.replace('**', '*') %> <%- arg.name %>; -<% } else { -%> - Persistent <%- arg.name %>Reference; - <%- arg.cType %> <%- arg.name %>; -<% } -%> -<% } -%> - Persistent callback; - }; -<% } -%> -<% } -%> -<% } -%> -<% if (typeof cType != 'undefined') { -%> - <%- cType %> *raw; -<% } -%> -}; - -#endif diff --git a/build/codegen/templates/syncFunction.cc.ejs b/build/codegen/templates/syncFunction.cc.ejs deleted file mode 100644 index 
a7c00aa05..000000000 --- a/build/codegen/templates/syncFunction.cc.ejs +++ /dev/null @@ -1,95 +0,0 @@ -/** -<% include doc.cc.ejs -%> - */ -Handle <%- cppClassName %>::<%- functionInfo.cppFunctionName %>(const Arguments& args) { - HandleScope scope; - <% include guardArguments.cc.ejs -%> - -<% - for (var i = 0; i < functionInfo.args.length; i++) { - var arg = functionInfo.args[i]; - if (!arg.isReturn) continue; --%> -<% if (arg.shouldAlloc) { -%> - <%- arg.cType %><%- arg.name %> = (<%- arg.cType %>)malloc(sizeof(<%- unPointer(arg.cType) %>)); -<% } else { -%> - <%- unPointer(arg.cType) %> <%- arg.name %> = <%- defaultValue(unPointer(arg.cType)) %>; -<% } -%> -<% } -%> -<% - for (var cArg = 0, jsArg = 0; cArg < functionInfo.args.length; cArg++) { - var arg = functionInfo.args[cArg]; - if (arg.isSelf || arg.isReturn || arg.isPayload) continue; --%> -<% include convertFromV8.cc.ejs -%> -<% jsArg++; -%> -<% } %> - <% if (returns.length || functionInfo.return.isErrorCode) { %><%- functionInfo.return.cType %> result = <% } %><%- functionInfo.cFunctionName %>( -<% - for (var cArg = 0, jsArg = 0; cArg < functionInfo.args.length; cArg++) { - var arg = functionInfo.args[cArg]; --%> - <% if (cArg > 0) { %>, <% } -%><% if (arg.isReturn && !arg.shouldAlloc) { %>&<% } -%> -<% if (arg.isSelf) { -%> -ObjectWrap::Unwrap<<%- cppClassName %>>(args.This())->GetValue() -<% } else if (arg.isReturn) { -%> -<%- arg.name %> -<% } else { -%> -from_<%- arg.name %> -<% } -%> -<% - if (!(arg.isReturn || arg.isSelf)) jsArg++; - } --%> - ); -<% - for (var i = 0; i < functionInfo.args.length; i++) { - var arg = functionInfo.args[i]; - if (arg.isSelf || arg.isReturn) continue; --%> -<% if (['String', 'Array'].indexOf(arg.cppClassName) > -1) { -%> -<% if (arg.freeFunctionName) { %> - <%- arg.freeFunctionName %>(from_<%- arg.name %>); -<% } else { -%> - free((void *)from_<%- arg.name %>); -<% } -%> -<% } -%> -<% } -%> -<% if (functionInfo.return.isErrorCode) { -%> - if (result != GIT_OK) { -<% - 
for (var i = 0; i < functionInfo.args.length; i++) { - var arg = functionInfo.args[i]; - if (!arg.shouldAlloc) continue; --%> - free(<%= arg.name %>); -<% } -%> - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } -<% } -%> - -<% if (!returns.length) { -%> - return Undefined(); -<% } else if (returns.length == 1) { -%> -<% var to = returns[0]; -%> - Handle to; - <% include convertToV8.cc.ejs -%> - return scope.Close(to); -<% } else { -%> - Handle toReturn = Object::New(); - Handle to; -<% - for (r in returns) { - var to = returns[r]; --%> - <% include convertToV8.cc.ejs -%> - toReturn->Set(String::NewSymbol("<%- to.jsName || to.name %>"), to); - -<% } -%> - return scope.Close(toReturn); -<% } -%> -} diff --git a/build/codegen/v0.18.0.json b/build/codegen/v0.18.0.json deleted file mode 100644 index 1671d2a81..000000000 --- a/build/codegen/v0.18.0.json +++ /dev/null @@ -1,17420 +0,0 @@ -[ - { - "filename": "attr.h", - "ignore": true, - "jsClassName": "Attr", - "cppClassName": "Attr", - "cType": "git_attr", - "functions": [ - { - "cFunctionName": "git_attr_get", - "args": [ - { - "name": "value_out", - "cType": "const char **", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Output of the value of the attribute. Use the GIT_ATTR_... macros to test for TRUE, FALSE, UNSPECIFIED, etc. or just use the string value for attributes set to a value. You should NOT modify or free this value." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository containing the path." - }, - { - "name": "flags", - "cType": "uint32_t", - "cppClassName": "Uint32", - "jsClassName": "Uint32", - "comment": "A combination of GIT_ATTR_CHECK... flags." 
- }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The path to check for attributes. Relative paths are interpreted relative to the repo root. The file does not have to exist, but if it does not, then it will be treated as a plain file (not a directory)." - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The name of the attribute to look up." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "get", - "cppFunctionName": "Get", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Look up the value of one git attribute for path.

\n" - }, - { - "cFunctionName": "git_attr_get_many", - "args": [ - { - "name": "values_out", - "cType": "const char **", - "cppClassName": "String", - "jsClassName": "String", - "comment": "An array of num_attr entries that will have string pointers written into it for the values of the attributes. You should not modify or free the values that are written into this array (although of course, you should free the array itself if you allocated it)." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository containing the path." - }, - { - "name": "flags", - "cType": "uint32_t", - "cppClassName": "Uint32", - "jsClassName": "Uint32", - "comment": "A combination of GIT_ATTR_CHECK... flags." - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The path inside the repo to check attributes. This does not have to exist, but if it does not, then it will be treated as a plain file (i.e. not a directory)." - }, - { - "name": "num_attr", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "The number of attributes being looked up" - }, - { - "name": "names", - "cType": "const char **", - "cppClassName": "String", - "jsClassName": "String", - "comment": "An array of num_attr strings containing attribute names." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "getMany", - "cppFunctionName": "GetMany", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Look up a list of git attributes for path.

\n" - }, - { - "cFunctionName": "git_attr_foreach", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository containing the path." - }, - { - "name": "flags", - "cType": "uint32_t", - "cppClassName": "Uint32", - "jsClassName": "Uint32", - "comment": "A combination of GIT_ATTR_CHECK... flags." - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Path inside the repo to check attributes. This does not have to exist, but if it does not, then it will be treated as a plain file (i.e. not a directory)." - }, - { - "name": "callback", - "cType": "git_attr_foreach_cb", - "cppClassName": "AttrForeachCb", - "jsClassName": "AttrForeachCb", - "comment": "Function to invoke on each attribute name and value. The value may be NULL is the attribute is explicitly set to UNSPECIFIED using the '!' sign. Callback will be invoked only once per attribute name, even if there are multiple rules for a given file. The highest priority rule will be used. Return a non-zero value from this to stop looping." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Passed on as extra parameter to callback function." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "foreach", - "cppFunctionName": "Foreach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Loop over all the git attributes for a path.

\n" - }, - { - "cFunctionName": "git_attr_cache_flush", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "cacheFlush", - "cppFunctionName": "CacheFlush", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Flush the gitattributes cache.

\n" - }, - { - "cFunctionName": "git_attr_add_macro", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - }, - { - "name": "values", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "addMacro", - "cppFunctionName": "AddMacro", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Add a macro definition.

\n" - } - ] - }, - { - "filename": "blob.h", - "dependencies": [ - "../include/repo.h", - "../include/oid.h", - "../include/wrapper.h", - "node_buffer.h" - ], - "jsClassName": "Blob", - "cppClassName": "GitBlob", - "cType": "git_blob", - "freeFunctionName": "git_blob_free", - "functions": [ - { - "cFunctionName": "git_blob_free", - "args": [ - { - "name": "blob", - "cType": "git_blob *", - "cppClassName": "GitBlob", - "jsClassName": "Blob", - "comment": "the blob to close" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Close an open blob

\n" - }, - { - "cFunctionName": "git_blob_id", - "args": [ - { - "name": "blob", - "cType": "const git_blob *", - "cppClassName": "GitBlob", - "jsClassName": "Blob", - "isSelf": true, - "comment": "a previously loaded blob." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "oid", - "cppFunctionName": "Oid", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "SHA1 hash for this blob.", - "jsClassName": "Oid" - }, - "description": "

Get the id of a blob.

\n" - }, - { - "cFunctionName": "git_blob_rawcontent", - "args": [ - { - "name": "blob", - "cType": "const git_blob *", - "cppClassName": "GitBlob", - "jsClassName": "Blob", - "isSelf": true, - "comment": "pointer to the blob" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "content", - "cppFunctionName": "Content", - "return": { - "cType": "const void *", - "cppClassName": "Wrapper", - "comment": "the pointer; NULL if the blob has no contents" - }, - "description": "

Get a read-only buffer with the raw content of a blob.

\n" - }, - { - "cFunctionName": "git_blob_rawsize", - "args": [ - { - "name": "blob", - "cType": "const git_blob *", - "cppClassName": "GitBlob", - "jsClassName": "Blob", - "isSelf": true, - "comment": "pointer to the blob" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "size", - "cppFunctionName": "Size", - "return": { - "cType": "git_off_t", - "cppClassName": "Number", - "comment": "size on bytes", - "jsClassName": "Number" - }, - "description": "

Get the size in bytes of the contents of a blob

\n" - }, - { - "cFunctionName": "git_blob_is_binary", - "args": [ - { - "name": "blob", - "cType": "git_blob *", - "cppClassName": "GitBlob", - "jsClassName": "Blob", - "isSelf": true, - "comment": "The blob which content should be analyzed" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "isBinary", - "cppFunctionName": "IsBinary", - "return": { - "cType": "int", - "cppClassName": "Boolean", - "comment": "1 if the content of the blob is detected as binary; 0 otherwise.", - "jsClassName": "Boolean" - }, - "description": "

Determine if the blob content is most certainly binary or not.

\n" - } - ] - }, - { - "filename": "branch.h", - "jsClassName": "Branch", - "cppClassName": "Branch", - "cType": "git_branch", - "freeFunctionName": "git_branch_free", - "functions": [ - { - "cFunctionName": "git_branch_create", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "Pointer where to store the underlying reference." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository" - }, - { - "name": "branch_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Name for the branch; this name is validated for consistency. It should also not conflict with an already existing branch name." - }, - { - "name": "target", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "comment": "Commit to which this branch should point. This object must belong to the given `repo`." - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Overwrite existing branch." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "create", - "cppFunctionName": "Create", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, GIT_EINVALIDSPEC or an error code. A proper reference is written in the refs/heads namespace pointing to the provided target commit.", - "jsClassName": "Number" - }, - "description": "

Create a new branch pointing at a target commit

\n" - }, - { - "cFunctionName": "git_branch_delete", - "args": [ - { - "name": "branch", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "A valid reference representing a branch" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "delete", - "cppFunctionName": "Delete", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, or an error code.", - "jsClassName": "Number" - }, - "description": "

Delete an existing branch reference.

\n" - }, - { - "cFunctionName": "git_branch_foreach", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository where to find the branches." - }, - { - "name": "list_flags", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Filtering flags for the branch listing. Valid values are GIT_BRANCH_LOCAL, GIT_BRANCH_REMOTE or a combination of the two." - }, - { - "name": "branch_cb", - "cType": "git_branch_foreach_cb", - "cppClassName": "BranchForeachCb", - "jsClassName": "BranchForeachCb", - "comment": "Callback to invoke per found branch." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Extra parameter to callback function." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "foreach", - "cppFunctionName": "Foreach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Loop over all the branches and issue a callback for each one.

\n" - }, - { - "cFunctionName": "git_branch_move", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference" - }, - { - "name": "branch", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "Current underlying reference of the branch." - }, - { - "name": "new_branch_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Target name of the branch once the move is performed; this name is validated for consistency." - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Overwrite existing branch." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "move", - "cppFunctionName": "Move", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EINVALIDSPEC or an error code.", - "jsClassName": "Number" - }, - "description": "

Move/rename an existing local branch reference.

\n" - }, - { - "cFunctionName": "git_branch_lookup", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "pointer to the looked-up branch reference" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "the repository to look up the branch" - }, - { - "name": "branch_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Name of the branch to be looked-up; this name is validated for consistency." - }, - { - "name": "branch_type", - "cType": "git_branch_t", - "cppClassName": "BranchT", - "jsClassName": "BranchT", - "comment": "Type of the considered branch. This should be valued with either GIT_BRANCH_LOCAL or GIT_BRANCH_REMOTE." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "lookup", - "cppFunctionName": "Lookup", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; GIT_ENOTFOUND when no matching branch exists, GIT_EINVALIDSPEC, otherwise an error code.", - "jsClassName": "Number" - }, - "description": "

Lookup a branch by its name in a repository.

\n" - }, - { - "cFunctionName": "git_branch_name", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "const char **", - "cppClassName": "String", - "jsClassName": "String", - "comment": "where the pointer of branch name is stored; this is valid as long as the ref is not freed." - }, - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "the reference ideally pointing to a branch" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "name", - "cppFunctionName": "Name", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; otherwise an error code (e.g., if the ref is no local or remote branch).", - "jsClassName": "Number" - }, - "description": "

Return the name of the given local or remote branch.

\n" - }, - { - "cFunctionName": "git_branch_upstream", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "Pointer where to store the retrieved reference." - }, - { - "name": "branch", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "Current underlying reference of the branch." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "upstream", - "cppFunctionName": "Upstream", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; GIT_ENOTFOUND when no remote tracking reference exists, otherwise an error code.", - "jsClassName": "Number" - }, - "description": "

Return the reference supporting the remote tracking branch,\ngiven a local branch reference.

\n" - }, - { - "cFunctionName": "git_branch_set_upstream", - "args": [ - { - "name": "branch", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "the branch to configure" - }, - { - "name": "upstream_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "remote-tracking or local branch to set as upstream. Pass NULL to unset." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "setUpstream", - "cppFunctionName": "SetUpstream", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Set the upstream configuration for a given local branch

\n" - }, - { - "cFunctionName": "git_branch_upstream_name", - "args": [ - { - "name": "tracking_branch_name_out", - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The user-allocated buffer which will be filled with the name of the reference. Pass NULL if you just want to get the needed size of the name of the reference as the output value." - }, - { - "name": "buffer_size", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Size of the `out` buffer in bytes." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "the repository where the branches live" - }, - { - "name": "canonical_branch_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "name of the local branch." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "upstreamName", - "cppFunctionName": "UpstreamName", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "number of characters in the reference name including the trailing NUL byte; GIT_ENOTFOUND when no remote tracking reference exists, otherwise an error code.", - "jsClassName": "Number" - }, - "description": "

Return the name of the reference supporting the remote tracking branch,\ngiven the name of a local branch reference.

\n" - }, - { - "cFunctionName": "git_branch_is_head", - "args": [ - { - "name": "branch", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "Current underlying reference of the branch." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "isHead", - "cppFunctionName": "IsHead", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 if HEAD points at the branch, 0 if it isn't, error code otherwise.", - "jsClassName": "Number" - }, - "description": "

Determine if the current local branch is pointed at by HEAD.

\n" - }, - { - "cFunctionName": "git_branch_remote_name", - "args": [ - { - "name": "remote_name_out", - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The user-allocated buffer which will be filled with the name of the remote. Pass NULL if you just want to get the needed size of the name of the remote as the output value." - }, - { - "name": "buffer_size", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Size of the `out` buffer in bytes." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository where the branch lives." - }, - { - "name": "canonical_branch_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "name of the remote tracking branch." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "remoteName", - "cppFunctionName": "RemoteName", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "Number of characters in the reference name including the trailing NUL byte; GIT_ENOTFOUND when no remote matching remote was found, GIT_EAMBIGUOUS when the branch maps to several remotes, otherwise an error code.", - "jsClassName": "Number" - }, - "description": "

Return the name of remote that the remote tracking branch belongs to.

\n" - } - ] - }, - { - "filename": "checkout.h", - "ignore": true, - "jsClassName": "Checkout", - "cppClassName": "Checkout", - "cType": "git_checkout", - "freeFunctionName": "git_checkout_free", - "functions": [ - { - "cFunctionName": "git_checkout_head", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "repository to check out (must be non-bare)" - }, - { - "name": "opts", - "cType": "git_checkout_opts *", - "cppClassName": "CheckoutOpts", - "jsClassName": "CheckoutOpts", - "comment": "specifies checkout options (may be NULL)" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "head", - "cppFunctionName": "Head", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EORPHANEDHEAD when HEAD points to a non existing branch, GIT_ERROR otherwise (use giterr_last for information about the error)", - "jsClassName": "Number" - }, - "description": "

Updates files in the index and the working tree to match the content of\nthe commit pointed at by HEAD.

\n" - }, - { - "cFunctionName": "git_checkout_index", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "repository into which to check out (must be non-bare)" - }, - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "comment": "index to be checked out (or NULL to use repository index)" - }, - { - "name": "opts", - "cType": "git_checkout_opts *", - "cppClassName": "CheckoutOpts", - "jsClassName": "CheckoutOpts", - "comment": "specifies checkout options (may be NULL)" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "index", - "cppFunctionName": "Index", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ERROR otherwise (use giterr_last for information about the error)", - "jsClassName": "Number" - }, - "description": "

Updates files in the working tree to match the content of the index.

\n" - }, - { - "cFunctionName": "git_checkout_tree", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "repository to check out (must be non-bare)" - }, - { - "name": "treeish", - "cType": "const git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "comment": "a commit, tag or tree which content will be used to update the working directory" - }, - { - "name": "opts", - "cType": "git_checkout_opts *", - "cppClassName": "CheckoutOpts", - "jsClassName": "CheckoutOpts", - "comment": "specifies checkout options (may be NULL)" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "tree", - "cppFunctionName": "Tree", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ERROR otherwise (use giterr_last for information about the error)", - "jsClassName": "Number" - }, - "description": "

Updates files in the index and working tree to match the content of the\ntree pointed at by the treeish.

\n" - } - ] - }, - { - "filename": "commit.h", - "dependencies": [ - "../include/oid.h", - "../include/repo.h", - "../include/signature.h", - "../include/tree.h" - ], - "jsClassName": "Commit", - "cppClassName": "GitCommit", - "cType": "git_commit", - "freeFunctionName": "git_commit_free", - "functions": [ - { - "cFunctionName": "git_commit_lookup_prefix", - "args": [ - { - "name": "commit", - "cType": "git_commit **", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isReturn": true, - "comment": "pointer to the looked up commit" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "the repo to use when locating the commit." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "identity of the commit to locate. If the object is an annotated tag it will be peeled back to the commit." - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the length of the short identifier" - } - ], - "ignore": true, - "isAsync": true, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "lookupPrefix", - "cppFunctionName": "LookupPrefix", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Lookup a commit object from a repository,\ngiven a prefix of its identifier (short id).

\n" - }, - { - "cFunctionName": "git_commit_free", - "args": [ - { - "name": "commit", - "cType": "git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "the commit to close" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Close an open commit

\n" - }, - { - "cFunctionName": "git_commit_id", - "args": [ - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "oid", - "cppFunctionName": "Oid", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "object identity for the commit.", - "jsClassName": "Oid" - }, - "description": "

Get the id of a commit.

\n" - }, - { - "cFunctionName": "git_commit_message_encoding", - "args": [ - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "messageEncoding", - "cppFunctionName": "MessageEncoding", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "NULL, or the encoding", - "jsClassName": "String" - }, - "description": "

Get the encoding for the message of a commit,\nas a string representing a standard encoding name.

\n" - }, - { - "cFunctionName": "git_commit_message", - "args": [ - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "message", - "cppFunctionName": "Message", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "the message of a commit", - "jsClassName": "String" - }, - "description": "

Get the full message of a commit.

\n" - }, - { - "cFunctionName": "git_commit_time", - "args": [ - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "time", - "cppFunctionName": "Time", - "return": { - "cType": "git_time_t", - "cppClassName": "Number", - "comment": "the time of a commit", - "jsClassName": "Number" - }, - "description": "

Get the commit time (i.e. committer time) of a commit.

\n" - }, - { - "cFunctionName": "git_commit_time_offset", - "args": [ - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "offset", - "cppFunctionName": "Offset", - "return": { - "cType": "int", - "cppClassName": "Integer", - "comment": "positive or negative timezone offset, in minutes from UTC", - "jsClassName": "Number" - }, - "description": "

Get the commit timezone offset (i.e. committer's preferred timezone) of a commit.

\n" - }, - { - "cFunctionName": "git_commit_committer", - "args": [ - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "committer", - "cppFunctionName": "Committer", - "return": { - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "copy": "git_signature_dup", - "comment": "the committer of a commit", - "jsClassName": "Signature" - }, - "description": "

Get the committer of a commit.

\n" - }, - { - "cFunctionName": "git_commit_author", - "args": [ - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "author", - "cppFunctionName": "Author", - "return": { - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "copy": "git_signature_dup", - "comment": "the author of a commit", - "jsClassName": "Signature" - }, - "description": "

Get the author of a commit.

\n" - }, - { - "cFunctionName": "git_commit_tree", - "args": [ - { - "name": "tree_out", - "cType": "git_tree **", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isReturn": true, - "comment": "pointer where to store the tree object" - }, - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - } - ], - "ignore": true, - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getTree", - "cppFunctionName": "GetTree", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Get the tree pointed to by a commit.

\n" - }, - { - "cFunctionName": "git_commit_tree_id", - "args": [ - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "treeId", - "cppFunctionName": "TreeId", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "the id of tree pointed to by commit.", - "jsClassName": "Oid" - }, - "description": "

Get the id of the tree pointed to by a commit. This differs from\ngit_commit_tree in that no attempts are made to fetch an object\nfrom the ODB.

\n" - }, - { - "cFunctionName": "git_commit_parentcount", - "args": [ - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "parentCount", - "cppFunctionName": "ParentCount", - "return": { - "cType": "unsigned int", - "cppClassName": "Uint32", - "comment": "integer of count of parents", - "jsClassName": "Number" - }, - "description": "

Get the number of parents of this commit

\n" - }, - { - "cFunctionName": "git_commit_parent", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_commit **", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "comment": "Pointer where to store the parent commit" - }, - { - "name": "commit", - "cType": "git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - }, - { - "name": "n", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the position of the parent (from 0 to `parentcount`)" - } - ], - "ignore": true, - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "parent", - "cppFunctionName": "Parent", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Get the specified parent of the commit.

\n" - }, - { - "cFunctionName": "git_commit_parent_id", - "args": [ - { - "name": "commit", - "cType": "git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - }, - { - "name": "n", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the position of the parent (from 0 to `parentcount`)" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "parentId", - "cppFunctionName": "ParentId", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "the id of the parent, NULL on error.", - "jsClassName": "Oid" - }, - "description": "

Get the oid of a specified parent for a commit. This is different from\ngit_commit_parent, which will attempt to load the parent commit from\nthe ODB.

\n" - }, - { - "cFunctionName": "git_commit_nth_gen_ancestor", - "args": [ - { - "name": "ancestor", - "cType": "git_commit **", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isReturn": true, - "comment": "Pointer where to store the ancestor commit" - }, - { - "name": "commit", - "cType": "const git_commit *", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isSelf": true, - "comment": "a previously loaded commit." - }, - { - "name": "n", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the requested generation" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "nthGenAncestor", - "cppFunctionName": "NthGenAncestor", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; GIT_ENOTFOUND if no matching ancestor exists or an error code", - "jsClassName": "Number" - }, - "description": "

Get the commit object that is the nth generation ancestor\nof the named commit object, following only the first parents.\nThe returned commit has to be freed by the caller.

\n" - }, - { - "cFunctionName": "git_commit_create_v", - "args": [ - { - "name": "id", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "isReturn": true - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository" - }, - { - "name": "update_ref", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - }, - { - "name": "author", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature" - }, - { - "name": "committer", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature" - }, - { - "name": "message_encoding", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - }, - { - "name": "message", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - }, - { - "name": "tree", - "cType": "const git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree" - }, - { - "name": "parent_count", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "createV", - "cppFunctionName": "CreateV", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Create new commit in the repository using a variable argument list.

\n" - } - ] - }, - { - "filename": "common.h", - "ignore": true, - "jsClassName": "Common", - "cppClassName": "Common", - "cType": "git_common", - "functions": [ - { - "cFunctionName": "git_libgit2_version", - "args": [ - { - "name": "major", - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Store the major version number" - }, - { - "name": "minor", - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Store the minor version number" - }, - { - "name": "rev", - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Store the revision (patch) number" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitLibgit2Version", - "cppFunctionName": "GitLibgit2Version", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Return the version of the libgit2 library\nbeing currently used.

\n" - }, - { - "cFunctionName": "git_libgit2_capabilities", - "args": [], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitLibgit2Capabilities", - "cppFunctionName": "GitLibgit2Capabilities", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "A combination of GIT_CAP_* values. - GIT_CAP_THREADS Libgit2 was compiled with thread support. Note that thread support is still to be seen as a 'work in progress' - basic object lookups are believed to be threadsafe, but other operations may not be. - GIT_CAP_HTTPS Libgit2 supports the https:// protocol. This requires the openssl library to be found when compiling libgit2.", - "jsClassName": "Number" - }, - "description": "

Query compile time options for libgit2.

\n" - }, - { - "cFunctionName": "git_libgit2_opts", - "args": [ - { - "name": "option", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Option key" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitLibgit2Opts", - "cppFunctionName": "GitLibgit2Opts", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 on failure", - "jsClassName": "Number" - }, - "description": "

Set or query a library global option

\n" - } - ] - }, - { - "filename": "config.h", - "ignore": true, - "jsClassName": "Config", - "cppClassName": "Config", - "cType": "git_config", - "freeFunctionName": "git_config_free", - "functions": [ - { - "cFunctionName": "git_config_find_global", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Buffer to store the path in" - }, - { - "name": "length", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "size of the buffer in bytes" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "findGlobal", - "cppFunctionName": "FindGlobal", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if a global configuration file has been found. Its path will be stored in `buffer`.", - "jsClassName": "Number" - }, - "description": "

Locate the path to the global configuration file

\n" - }, - { - "cFunctionName": "git_config_find_xdg", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Buffer to store the path in" - }, - { - "name": "length", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "size of the buffer in bytes" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "findXdg", - "cppFunctionName": "FindXdg", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if a xdg compatible configuration file has been found. Its path will be stored in `buffer`.", - "jsClassName": "Number" - }, - "description": "

Locate the path to the global xdg compatible configuration file

\n" - }, - { - "cFunctionName": "git_config_find_system", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Buffer to store the path in" - }, - { - "name": "length", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "size of the buffer in bytes" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "findSystem", - "cppFunctionName": "FindSystem", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if a system configuration file has been found. Its path will be stored in `buffer`.", - "jsClassName": "Number" - }, - "description": "

Locate the path to the system configuration file

\n" - }, - { - "cFunctionName": "git_config_open_default", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_config **", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "Pointer to store the config instance" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "openDefault", - "cppFunctionName": "OpenDefault", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Open the global, XDG and system configuration files

\n" - }, - { - "cFunctionName": "git_config_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_config **", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "pointer to the new configuration" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "new", - "cppFunctionName": "New", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Allocate a new configuration object

\n" - }, - { - "cFunctionName": "git_config_add_backend", - "args": [ - { - "name": "cfg", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "the configuration to add the file to" - }, - { - "name": "file", - "cType": "git_config_backend *", - "cppClassName": "ConfigBackend", - "jsClassName": "ConfigBackend", - "comment": "the configuration file (backend) to add" - }, - { - "name": "level", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the priority level of the backend" - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "if a config file already exists for the given priority level, replace it" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addBackend", - "cppFunctionName": "AddBackend", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EEXISTS when adding more than one file for a given priority level (and force_replace set to 0), or error code", - "jsClassName": "Number" - }, - "description": "

Add a generic config file instance to an existing config

\n" - }, - { - "cFunctionName": "git_config_add_file_ondisk", - "args": [ - { - "name": "cfg", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "the configuration to add the file to" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "path to the configuration file to add" - }, - { - "name": "level", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the priority level of the backend" - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "replace config file at the given priority level" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addFileOndisk", - "cppFunctionName": "AddFileOndisk", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EEXISTS when adding more than one file for a given priority level (and force_replace set to 0), GIT_ENOTFOUND when the file doesn't exist or error code", - "jsClassName": "Number" - }, - "description": "

Add an on-disk config file instance to an existing config

\n" - }, - { - "cFunctionName": "git_config_open_ondisk", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_config **", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "The configuration instance to create" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Path to the on-disk file to open" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "openOndisk", - "cppFunctionName": "OpenOndisk", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ENOTFOUND when the file doesn't exist or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new config instance containing a single on-disk file

\n" - }, - { - "cFunctionName": "git_config_open_level", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_config **", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "The configuration instance to create" - }, - { - "name": "parent", - "cType": "const git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "Multi-level config to search for the given level" - }, - { - "name": "level", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Configuration level to search for" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "openLevel", - "cppFunctionName": "OpenLevel", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, GIT_ENOTFOUND if the passed level cannot be found in the multi-level parent config, or an error code", - "jsClassName": "Number" - }, - "description": "

Build a single-level focused config object from a multi-level one.

\n" - }, - { - "cFunctionName": "git_config_refresh", - "args": [ - { - "name": "cfg", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "The configuration to refresh" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "refresh", - "cppFunctionName": "Refresh", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Reload changed config files

\n" - }, - { - "cFunctionName": "git_config_free", - "args": [ - { - "name": "cfg", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "the configuration to free" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free the configuration and its associated memory and files

\n" - }, - { - "cFunctionName": "git_config_get_entry", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "const git_config_entry **", - "cppClassName": "ConfigEntry", - "jsClassName": "ConfigEntry", - "comment": "pointer to the variable git_config_entry" - }, - { - "name": "cfg", - "cType": "const git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "getEntry", - "cppFunctionName": "GetEntry", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Get the git_config_entry of a config variable.

\n" - }, - { - "cFunctionName": "git_config_get_int32", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "int32_t *", - "cppClassName": "int32_t", - "jsClassName": "int32_t", - "comment": "pointer to the variable where the value should be stored" - }, - { - "name": "cfg", - "cType": "const git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "getInt32", - "cppFunctionName": "GetInt32", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Get the value of an integer config variable.

\n" - }, - { - "cFunctionName": "git_config_get_int64", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "int64_t *", - "cppClassName": "int64_t", - "jsClassName": "int64_t", - "comment": "pointer to the variable where the value should be stored" - }, - { - "name": "cfg", - "cType": "const git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "getInt64", - "cppFunctionName": "GetInt64", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Get the value of a long integer config variable.

\n" - }, - { - "cFunctionName": "git_config_get_bool", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "pointer to the variable where the value should be stored" - }, - { - "name": "cfg", - "cType": "const git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "getBool", - "cppFunctionName": "GetBool", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Get the value of a boolean config variable.

\n" - }, - { - "cFunctionName": "git_config_get_string", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "const char **", - "cppClassName": "String", - "jsClassName": "String", - "comment": "pointer to the variable's value" - }, - { - "name": "cfg", - "cType": "const git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "getString", - "cppFunctionName": "GetString", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Get the value of a string config variable.

\n" - }, - { - "cFunctionName": "git_config_get_multivar", - "args": [ - { - "name": "cfg", - "cType": "const git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - }, - { - "name": "regexp", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "regular expression to filter which variables we're interested in. Use NULL to indicate all" - }, - { - "name": "callback", - "cType": "git_config_foreach_cb", - "cppClassName": "ConfigForeachCb", - "jsClassName": "ConfigForeachCb", - "comment": "the function to be called on each value of the variable" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "opaque pointer to pass to the callback" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getMultivar", - "cppFunctionName": "GetMultivar", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Get each value of a multivar.

\n" - }, - { - "cFunctionName": "git_config_set_int32", - "args": [ - { - "name": "cfg", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - }, - { - "name": "value", - "cType": "int32_t", - "cppClassName": "int32_t", - "jsClassName": "int32_t", - "comment": "Integer value for the variable" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setInt32", - "cppFunctionName": "SetInt32", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Set the value of an integer config variable in the config file\nwith the highest level (usually the local one).

\n" - }, - { - "cFunctionName": "git_config_set_int64", - "args": [ - { - "name": "cfg", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - }, - { - "name": "value", - "cType": "int64_t", - "cppClassName": "int64_t", - "jsClassName": "int64_t", - "comment": "Long integer value for the variable" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setInt64", - "cppFunctionName": "SetInt64", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Set the value of a long integer config variable in the config file\nwith the highest level (usually the local one).

\n" - }, - { - "cFunctionName": "git_config_set_bool", - "args": [ - { - "name": "cfg", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - }, - { - "name": "value", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "the value to store" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setBool", - "cppFunctionName": "SetBool", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Set the value of a boolean config variable in the config file\nwith the highest level (usually the local one).

\n" - }, - { - "cFunctionName": "git_config_set_string", - "args": [ - { - "name": "cfg", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - }, - { - "name": "value", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the string to store." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setString", - "cppFunctionName": "SetString", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Set the value of a string config variable in the config file\nwith the highest level (usually the local one).

\n" - }, - { - "cFunctionName": "git_config_set_multivar", - "args": [ - { - "name": "cfg", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "where to look for the variable" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable's name" - }, - { - "name": "regexp", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "a regular expression to indicate which values to replace" - }, - { - "name": "value", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the new value." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setMultivar", - "cppFunctionName": "SetMultivar", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Set a multivar in the local config file.

\n" - }, - { - "cFunctionName": "git_config_delete_entry", - "args": [ - { - "name": "cfg", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "the configuration" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the variable to delete" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "deleteEntry", - "cppFunctionName": "DeleteEntry", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Delete a config variable from the config file\nwith the highest level (usually the local one).

\n" - }, - { - "cFunctionName": "git_config_foreach", - "args": [ - { - "name": "cfg", - "cType": "const git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "where to get the variables from" - }, - { - "name": "callback", - "cType": "git_config_foreach_cb", - "cppClassName": "ConfigForeachCb", - "jsClassName": "ConfigForeachCb", - "comment": "the function to call on each variable" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "the data to pass to the callback" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "foreach", - "cppFunctionName": "Foreach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Perform an operation on each config variable.

\n" - }, - { - "cFunctionName": "git_config_foreach_match", - "args": [ - { - "name": "cfg", - "cType": "const git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "isSelf": true, - "comment": "where to get the variables from" - }, - { - "name": "regexp", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "regular expression to match against config names" - }, - { - "name": "callback", - "cType": "git_config_foreach_cb", - "cppClassName": "ConfigForeachCb", - "jsClassName": "ConfigForeachCb", - "comment": "the function to call on each variable" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "the data to pass to the callback" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "foreachMatch", - "cppFunctionName": "ForeachMatch", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or the return value of the callback which didn't return 0", - "jsClassName": "Number" - }, - "description": "

Perform an operation on each config variable matching a regular expression.

\n" - }, - { - "cFunctionName": "git_config_get_mapped", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "place to store the result of the mapping" - }, - { - "name": "cfg", - "cType": "const git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "config file to get the variables from" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "name of the config variable to lookup" - }, - { - "name": "maps", - "cType": "const git_cvar_map *", - "cppClassName": "CvarMap", - "jsClassName": "CvarMap", - "comment": "array of `git_cvar_map` objects specifying the possible mappings" - }, - { - "name": "map_n", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "number of mapping objects in `maps`" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "getMapped", - "cppFunctionName": "GetMapped", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, error code otherwise", - "jsClassName": "Number" - }, - "description": "

Query the value of a config variable and return it mapped to\nan integer constant.

\n" - }, - { - "cFunctionName": "git_config_lookup_map_value", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "place to store the result of the parsing" - }, - { - "name": "maps", - "cType": "const git_cvar_map *", - "cppClassName": "CvarMap", - "jsClassName": "CvarMap", - "comment": "array of `git_cvar_map` objects specifying the possible mappings" - }, - { - "name": "map_n", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "number of mapping objects in `maps`" - }, - { - "name": "value", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "value to parse" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "lookupMapValue", - "cppFunctionName": "LookupMapValue", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Maps a string value to an integer constant

\n" - }, - { - "cFunctionName": "git_config_parse_bool", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "place to store the result of the parsing" - }, - { - "name": "value", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "value to parse" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "parseBool", - "cppFunctionName": "ParseBool", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Parse a string value as a bool.

\n" - }, - { - "cFunctionName": "git_config_parse_int32", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "int32_t *", - "cppClassName": "int32_t", - "jsClassName": "int32_t", - "comment": "place to store the result of the parsing" - }, - { - "name": "value", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "value to parse" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "parseInt32", - "cppFunctionName": "ParseInt32", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Parse a string value as an int32.

\n" - }, - { - "cFunctionName": "git_config_parse_int64", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "int64_t *", - "cppClassName": "int64_t", - "jsClassName": "int64_t", - "comment": "place to store the result of the parsing" - }, - { - "name": "value", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "value to parse" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "parseInt64", - "cppFunctionName": "ParseInt64", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Parse a string value as an int64.

\n" - } - ] - }, - { - "filename": "cred_helpers.h", - "ignore": true, - "jsClassName": "CredHelpers", - "cppClassName": "CredHelpers", - "cType": "git_cred_helpers", - "freeFunctionName": "git_cred_helpers_free", - "functions": [ - { - "cFunctionName": "git_cred_userpass", - "args": [ - { - "name": "cred", - "cType": "git_cred **", - "cppClassName": "Cred", - "jsClassName": "Cred", - "comment": "The newly created credential object." - }, - { - "name": "url", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The resource for which we are demanding a credential." - }, - { - "name": "user_from_url", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The username that was embedded in a \"user@host\" remote url, or NULL if not included." - }, - { - "name": "allowed_types", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "A bitmask stating which cred types are OK to return." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "The payload provided when specifying this callback. (This is interpreted as a `git_cred_userpass_payload*`.)" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitCredUserpass", - "cppFunctionName": "GitCredUserpass", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Stock callback usable as a git_cred_acquire_cb. This calls\ngit_cred_userpass_plaintext_new unless the protocol has not specified\nGIT_CREDTYPE_USERPASS_PLAINTEXT as an allowed type.

\n" - } - ] - }, - { - "filename": "patch.h", - "dependencies": [ - "../include/delta.h", - "../include/diff_range.h" - ], - "jsClassName": "Patch", - "cppClassName": "GitPatch", - "cType": "git_diff_patch", - "freeFunctionName": "git_diff_patch_free", - "functions": [ - { - "cFunctionName": "git_diff_patch_free", - "args": [ - { - "name": "patch", - "cType": "git_diff_patch *", - "cppClassName": "GitPatch", - "jsClassName": "Patch", - "isSelf": true - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "isFree": true, - "jsFunctionName": "patchFree", - "cppFunctionName": "PatchFree", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free a git_diff_patch object.

\n" - }, - { - "cFunctionName": "git_diff_patch_delta", - "args": [ - { - "name": "patch", - "cType": "git_diff_patch *", - "cppClassName": "GitPatch", - "jsClassName": "Patch", - "isSelf": true - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "delta", - "cppFunctionName": "Delta", - "return": { - "cType": "const git_diff_delta *", - "cppClassName": "GitDelta", - "copy": "git_diff_delta_dup", - "jsClassName": "Delta" - }, - "description": "

Get the delta associated with a patch

\n" - }, - { - "cFunctionName": "git_diff_patch_num_hunks", - "args": [ - { - "name": "patch", - "cType": "git_diff_patch *", - "cppClassName": "GitPatch", - "jsClassName": "Patch", - "isSelf": true - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "size", - "cppFunctionName": "Size", - "return": { - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - "description": "

Get the number of hunks in a patch

\n" - }, - { - "cFunctionName": "git_diff_patch_line_stats", - "args": [ - { - "name": "total_context", - "cType": "size_t *", - "cppClassName": "Integer", - "jsClassName": "Number", - "isReturn": true, - "comment": "Count of context lines in output, can be NULL." - }, - { - "name": "total_additions", - "cType": "size_t *", - "cppClassName": "Integer", - "jsClassName": "Number", - "isReturn": true, - "comment": "Count of addition lines in output, can be NULL." - }, - { - "name": "total_deletions", - "cType": "size_t *", - "cppClassName": "Integer", - "jsClassName": "Number", - "isReturn": true, - "comment": "Count of deletion lines in output, can be NULL." - }, - { - "name": "patch", - "cType": "const git_diff_patch *", - "cppClassName": "GitPatch", - "jsClassName": "Patch", - "isSelf": true, - "comment": "The git_diff_patch object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "stats", - "cppFunctionName": "Stats", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 on error", - "jsClassName": "Number" - }, - "description": "

Get line counts of each type in a patch.

\n" - }, - { - "cFunctionName": "git_diff_patch_get_hunk", - "args": [ - { - "name": "range", - "jsName": "range", - "cType": "const git_diff_range **", - "cppClassName": "GitDiffRange", - "jsClassName": "DiffRange", - "isReturn": true, - "copy": "git_diff_range_dup", - "comment": "Output pointer to git_diff_range of hunk" - }, - { - "name": "header", - "jsName": "header", - "cType": "const char **", - "cppClassName": "String", - "jsClassName": "String", - "isReturn": true, - "comment": "Output pointer to header string for hunk. Unlike the content pointer for each line, this will be NUL-terminated" - }, - { - "name": "header_len", - "jsName": "headerLength", - "cType": "size_t *", - "cppClassName": "Uint32", - "jsClassName": "Number", - "isReturn": true, - "comment": "Output value of characters in header string" - }, - { - "name": "lines_in_hunk", - "jsName": "lines", - "cType": "size_t *", - "cppClassName": "Uint32", - "jsClassName": "Number", - "isReturn": true, - "comment": "Output count of total lines in this hunk" - }, - { - "name": "patch", - "cType": "git_diff_patch *", - "cppClassName": "GitPatch", - "jsClassName": "Patch", - "isSelf": true, - "comment": "Input pointer to patch object" - }, - { - "name": "hunk_idx", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Input index of hunk to get information about" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "hunk", - "cppFunctionName": "Hunk", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ENOTFOUND if hunk_idx out of range, <0 on error", - "jsClassName": "Number" - }, - "description": "

Get the information about a hunk in a patch

\n" - }, - { - "cFunctionName": "git_diff_patch_num_lines_in_hunk", - "args": [ - { - "name": "patch", - "cType": "git_diff_patch *", - "cppClassName": "GitPatch", - "jsClassName": "Patch", - "isSelf": true, - "comment": "The git_diff_patch object" - }, - { - "name": "hunk_idx", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Index of the hunk" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "lines", - "cppFunctionName": "Lines", - "return": { - "cType": "int", - "cppClassName": "Int32", - "comment": "Number of lines in hunk or -1 if invalid hunk index", - "jsClassName": "Number" - }, - "description": "

Get the number of lines in a hunk.

\n" - }, - { - "cFunctionName": "git_diff_patch_get_line_in_hunk", - "args": [ - { - "name": "line_origin", - "jsName": "lineOrigin", - "cType": "char *", - "cppClassName": "Integer", - "jsClassName": "Number", - "isReturn": true, - "comment": "A GIT_DIFF_LINE constant from above" - }, - { - "name": "content", - "jsName": "content", - "size": "content_len", - "cType": "const char **", - "cppClassName": "String", - "jsClassName": "String", - "isReturn": true, - "comment": "Pointer to content of diff line, not NUL-terminated" - }, - { - "name": "content_len", - "jsName": "length", - "cType": "size_t *", - "cppClassName": "Uint32", - "jsClassName": "Number", - "isReturn": true, - "comment": "Number of characters in content" - }, - { - "name": "old_lineno", - "jsName": "oldLineNumber", - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "isReturn": true, - "comment": "Line number in old file or -1 if line is added" - }, - { - "name": "new_lineno", - "jsName": "newLineNumber", - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "isReturn": true, - "comment": "Line number in new file or -1 if line is deleted" - }, - { - "name": "patch", - "cType": "git_diff_patch *", - "cppClassName": "GitPatch", - "jsClassName": "Patch", - "isSelf": true, - "comment": "The patch to look in" - }, - { - "name": "hunk_idx", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "The index of the hunk" - }, - { - "name": "line_of_hunk", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "The index of the line in the hunk" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "line", - "cppFunctionName": "Line", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 on failure", - "jsClassName": "Number" - }, - "description": "

Get data about a line in a hunk of a patch.

\n" - }, - { - "cFunctionName": "git_diff_patch_print", - "args": [ - { - "name": "patch", - "cType": "git_diff_patch *", - "cppClassName": "GitPatch", - "jsClassName": "Patch", - "isSelf": true, - "comment": "A git_diff_patch representing changes to one file" - }, - { - "name": "print_cb", - "cType": "git_diff_data_cb", - "cppClassName": "DiffDataCb", - "jsClassName": "DiffDataCb", - "comment": "Callback function to output lines of the patch. Will be called for file headers, hunk headers, and diff lines." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Reference pointer that will be passed to your callbacks." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "patchPrint", - "cppFunctionName": "PatchPrint", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Serialize the patch to text via callback.

\n" - }, - { - "cFunctionName": "git_diff_patch_to_str", - "args": [ - { - "name": "string", - "cType": "char **", - "cppClassName": "String", - "jsClassName": "String", - "isReturn": true, - "comment": "Allocated string; caller must free.", - "freeFunctionName": "free" - }, - { - "name": "patch", - "cType": "git_diff_patch *", - "cppClassName": "GitPatch", - "jsClassName": "Patch", - "isSelf": true, - "comment": "A git_diff_patch representing changes to one file" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "toString", - "cppFunctionName": "ToString", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 on failure.", - "jsClassName": "Number" - }, - "description": "

Get the content of a patch as a single diff text.

\n" - } - ] - }, - { - "filename": "clone_options.h", - "dependencies": [], - "jsClassName": "CloneOptions", - "cppClassName": "GitCloneOptions", - "cType": "git_clone_options", - "freeFunctionName": "free", - "fields": [] - }, - { - "filename": "diff_options.h", - "dependencies": [], - "jsClassName": "DiffOptions", - "cppClassName": "GitDiffOptions", - "cType": "git_diff_options", - "freeFunctionName": "free", - "fields": [] - }, - { - "filename": "diff_find_options.h", - "dependencies": [], - "jsClassName": "DiffFindOptions", - "cppClassName": "GitDiffFindOptions", - "cType": "git_diff_find_options", - "freeFunctionName": "free", - "fields": [] - }, - { - "filename": "diff_range.h", - "dependencies": [], - "jsClassName": "DiffRange", - "cppClassName": "GitDiffRange", - "cType": "git_diff_range", - "freeFunctionName": "free", - "fields": [ - { - "jsFunctionName": "oldStart", - "cppFunctionName": "OldStart", - "name": "old_start", - "cType": "int", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "oldLines", - "cppFunctionName": "OldLines", - "name": "old_lines", - "cType": "int", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "newStart", - "cppFunctionName": "NewStart", - "name": "new_start", - "cType": "int", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "newLines", - "cppFunctionName": "NewLines", - "name": "new_lines", - "cType": "int", - "cppClassName": "Integer", - "jsClassName": "Number" - } - ] - }, - { - "filename": "diff_file.h", - "dependencies": [ - "../include/oid.h" - ], - "jsClassName": "DiffFile", - "cppClassName": "GitDiffFile", - "cType": "git_diff_file", - "freeFunctionName": "free", - "fields": [ - { - "jsFunctionName": "oid", - "cppFunctionName": "Oid", - "name": "oid", - "cType": "git_oid", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "copy": "git_oid_dup" - }, - { - "jsFunctionName": "path", - "cppFunctionName": "Path", - 
"name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - }, - { - "jsFunctionName": "size", - "cppFunctionName": "Size", - "name": "size", - "cType": "git_off_t", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "flags", - "cppFunctionName": "Flags", - "name": "flags", - "cType": "uint32_t", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "mode", - "cppFunctionName": "Mode", - "name": "mode", - "cType": "uint16_t", - "cppClassName": "Integer", - "jsClassName": "Number" - } - ] - }, - { - "filename": "delta.h", - "dependencies": [ - "../include/diff_file.h" - ], - "jsClassName": "Delta", - "cppClassName": "GitDelta", - "cType": "git_diff_delta", - "freeFunctionName": "free", - "fields": [ - { - "jsFunctionName": "oldFile", - "cppFunctionName": "OldFile", - "name": "old_file", - "cType": "git_diff_file", - "cppClassName": "GitDiffFile", - "jsClassName": "DiffFile", - "copy": "git_diff_file_dup" - }, - { - "jsFunctionName": "newFile", - "cppFunctionName": "NewFile", - "name": "new_file", - "cType": "git_diff_file", - "cppClassName": "GitDiffFile", - "jsClassName": "DiffFile", - "copy": "git_diff_file_dup" - }, - { - "jsFunctionName": "status", - "cppFunctionName": "Status", - "name": "status", - "cType": "git_delta_t", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "similarity", - "cppFunctionName": "Similarity", - "name": "similarity", - "cType": "uint32_t", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "flags", - "cppFunctionName": "Flags", - "name": "flags", - "cType": "uint32_t", - "cppClassName": "Integer", - "jsClassName": "Number" - } - ] - }, - { - "filename": "diff_list.h", - "dependencies": [ - "../include/diff_options.h", - "../include/diff_find_options.h", - "../include/repo.h", - "../include/tree.h", - "../include/index.h", - "../include/patch.h", - "../include/delta.h" 
- ], - "jsClassName": "DiffList", - "cppClassName": "GitDiffList", - "cType": "git_diff_list", - "freeFunctionName": "git_diff_list_free", - "functions": [ - { - "cFunctionName": "git_diff_list_free", - "args": [ - { - "name": "diff", - "cType": "git_diff_list *", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "comment": "The previously created diff list; cannot be used after free." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "listFree", - "cppFunctionName": "ListFree", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Deallocate a diff list.

\n" - }, - { - "cFunctionName": "git_diff_merge", - "args": [ - { - "name": "onto", - "cType": "git_diff_list *", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "isSelf": true, - "comment": "Diff to merge into." - }, - { - "name": "from", - "cType": "const git_diff_list *", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "comment": "Diff to merge." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "merge", - "cppFunctionName": "Merge", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Merge one diff list into another.

\n" - }, - { - "cFunctionName": "git_diff_find_similar", - "args": [ - { - "name": "diff", - "cType": "git_diff_list *", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "isSelf": true, - "comment": "Diff list to run detection algorithms on" - }, - { - "name": "options", - "cType": "git_diff_find_options *", - "cppClassName": "GitDiffFindOptions", - "jsClassName": "DiffFindOptions", - "comment": "Control how detection should be run, NULL for defaults" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "findSimilar", - "cppFunctionName": "FindSimilar", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, -1 on failure", - "jsClassName": "Number" - }, - "description": "

Transform a diff list marking file renames, copies, etc.

\n" - }, - { - "cFunctionName": "git_diff_foreach", - "args": [ - { - "name": "diff", - "cType": "git_diff_list *", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "comment": "A git_diff_list generated by one of the above functions." - }, - { - "name": "file_cb", - "cType": "git_diff_file_cb", - "cppClassName": "DiffFileCb", - "jsClassName": "DiffFileCb", - "comment": "Callback function to make per file in the diff." - }, - { - "name": "hunk_cb", - "cType": "git_diff_hunk_cb", - "cppClassName": "DiffHunkCb", - "jsClassName": "DiffHunkCb", - "comment": "Optional callback to make per hunk of text diff. This callback is called to describe a range of lines in the diff. It will not be issued for binary files." - }, - { - "name": "line_cb", - "cType": "git_diff_data_cb", - "cppClassName": "DiffDataCb", - "jsClassName": "DiffDataCb", - "comment": "Optional callback to make per line of diff text. This same callback will be made for context lines, added, and removed lines, and even for a deleted trailing newline." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Reference pointer that will be passed to your callbacks." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "foreach", - "cppFunctionName": "Foreach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Loop over all deltas in a diff list issuing callbacks.

\n" - }, - { - "cFunctionName": "git_diff_print_compact", - "args": [ - { - "name": "diff", - "cType": "git_diff_list *", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "comment": "A git_diff_list generated by one of the above functions." - }, - { - "name": "print_cb", - "cType": "git_diff_data_cb", - "cppClassName": "DiffDataCb", - "jsClassName": "DiffDataCb", - "comment": "Callback to make per line of diff text." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Reference pointer that will be passed to your callback." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "printCompact", - "cppFunctionName": "PrintCompact", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Iterate over a diff generating text output like "git diff --name-status".

\n" - }, - { - "cFunctionName": "git_diff_status_char", - "args": [ - { - "name": "status", - "cType": "git_delta_t", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "The git_delta_t value to look up" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "statusChar", - "cppFunctionName": "StatusChar", - "return": { - "cType": "char", - "cppClassName": "String", - "comment": "The single character label for that code", - "jsClassName": "String" - }, - "description": "

Look up the single character abbreviation for a delta status code.

\n" - }, - { - "cFunctionName": "git_diff_print_patch", - "args": [ - { - "name": "diff", - "cType": "git_diff_list *", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "isSelf": true, - "comment": "A git_diff_list generated by one of the above functions." - }, - { - "name": "print_cb", - "cType": "git_diff_data_cb", - "cppClassName": "DiffDataCb", - "jsClassName": "DiffDataCb", - "comment": "Callback function to output lines of the diff. This same function will be called for file headers, hunk headers, and diff lines. Fortunately, you can probably use various GIT_DIFF_LINE constants to determine what text you are given." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Reference pointer that will be passed to your callbacks." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "printPatch", - "cppFunctionName": "PrintPatch", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Iterate over a diff generating text output like "git diff".

\n" - }, - { - "cFunctionName": "git_diff_num_deltas", - "args": [ - { - "name": "diff", - "cType": "git_diff_list *", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "isSelf": true, - "comment": "A git_diff_list generated by one of the above functions" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "size", - "cppFunctionName": "Size", - "return": { - "cType": "size_t", - "cppClassName": "Uint32", - "comment": "Count of number of deltas in the list", - "jsClassName": "Number" - }, - "description": "

Query how many diff records are there in a diff list.

\n" - }, - { - "cFunctionName": "git_diff_num_deltas_of_type", - "args": [ - { - "name": "diff", - "cType": "git_diff_list *", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "comment": "A git_diff_list generated by one of the above functions" - }, - { - "name": "type", - "cType": "git_delta_t", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "A git_delta_t value to filter the count" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "numDeltasOfType", - "cppFunctionName": "NumDeltasOfType", - "return": { - "cType": "size_t", - "cppClassName": "Uint32", - "comment": "Count of number of deltas matching delta_t type", - "jsClassName": "Number" - }, - "description": "

Query how many diff deltas are there in a diff list filtered by type.

\n" - }, - { - "cFunctionName": "git_diff_get_patch", - "args": [ - { - "name": "patch_out", - "jsName": "patch", - "cType": "git_diff_patch **", - "cppClassName": "GitPatch", - "jsClassName": "Patch", - "isReturn": true, - "comment": "Output parameter for the delta patch object" - }, - { - "name": "delta_out", - "jsName": "delta", - "cType": "const git_diff_delta **", - "copy": "git_diff_delta_dup", - "cppClassName": "GitDelta", - "jsClassName": "Delta", - "isReturn": true, - "comment": "Output parameter for the delta object" - }, - { - "name": "diff", - "cType": "git_diff_list *", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "isSelf": true, - "comment": "Diff list object" - }, - { - "name": "idx", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Index into diff list" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "patch", - "cppFunctionName": "Patch", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, other value < 0 on error", - "jsClassName": "Number" - }, - "description": "

Return the diff delta and patch for an entry in the diff list.

\n" - }, - { - "cFunctionName": "git_diff_blobs", - "args": [ - { - "name": "old_blob", - "cType": "const git_blob *", - "cppClassName": "GitBlob", - "jsClassName": "Blob", - "comment": "Blob for old side of diff, or NULL for empty blob" - }, - { - "name": "new_blob", - "cType": "const git_blob *", - "cppClassName": "GitBlob", - "jsClassName": "Blob" - }, - { - "name": "options", - "cType": "const git_diff_options *", - "cppClassName": "GitDiffOptions", - "jsClassName": "DiffOptions" - }, - { - "name": "file_cb", - "cType": "git_diff_file_cb", - "cppClassName": "DiffFileCb", - "jsClassName": "DiffFileCb" - }, - { - "name": "hunk_cb", - "cType": "git_diff_hunk_cb", - "cppClassName": "DiffHunkCb", - "jsClassName": "DiffHunkCb" - }, - { - "name": "line_cb", - "cType": "git_diff_data_cb", - "cppClassName": "DiffDataCb", - "jsClassName": "DiffDataCb" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "blobs", - "cppFunctionName": "Blobs", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback return, or error code", - "jsClassName": "Number" - }, - "description": "

Directly run a diff on two blobs.

\n" - }, - { - "cFunctionName": "git_diff_blob_to_buffer", - "args": [ - { - "name": "old_blob", - "cType": "const git_blob *", - "cppClassName": "GitBlob", - "jsClassName": "Blob", - "comment": "Blob for old side of diff, or NULL for empty blob" - }, - { - "name": "buffer", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - }, - { - "name": "buffer_len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - { - "name": "options", - "cType": "const git_diff_options *", - "cppClassName": "GitDiffOptions", - "jsClassName": "DiffOptions" - }, - { - "name": "file_cb", - "cType": "git_diff_file_cb", - "cppClassName": "DiffFileCb", - "jsClassName": "DiffFileCb" - }, - { - "name": "hunk_cb", - "cType": "git_diff_hunk_cb", - "cppClassName": "DiffHunkCb", - "jsClassName": "DiffHunkCb" - }, - { - "name": "data_cb", - "cType": "git_diff_data_cb", - "cppClassName": "DiffDataCb", - "jsClassName": "DiffDataCb" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "blobToBuffer", - "cppFunctionName": "BlobToBuffer", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback return, or error code", - "jsClassName": "Number" - }, - "description": "

Directly run a diff between a blob and a buffer.

\n" - } - ] - }, - { - "filename": "error.h", - "ignore": true, - "jsClassName": "Error", - "cppClassName": "GitError", - "cType": "git_error", - "freeFunctionName": "git_errors_free", - "functions": [ - { - "cFunctionName": "giterr_last", - "args": [], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "lastError", - "cppFunctionName": "LastError", - "return": { - "cType": "const git_error *", - "cppClassName": "Error", - "copy": "fixme", - "comment": "A git_error object." - }, - "description": "

Return the last git_error object that was generated for the\ncurrent thread or NULL if no error has occurred.

\n" - }, - { - "cFunctionName": "giterr_clear", - "args": [], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "clear", - "cppFunctionName": "Clear", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Clear the last library error that occurred for this thread.

\n" - }, - { - "cFunctionName": "giterr_set_str", - "args": [ - { - "name": "error_class", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "One of the `git_error_t` enum above describing the general subsystem that is responsible for the error." - }, - { - "name": "string", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The formatted error message to keep" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "setString", - "cppFunctionName": "SetString", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Set the error message string for this thread.

\n" - }, - { - "cFunctionName": "giterr_set_oom", - "args": [], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "setOOM", - "cppFunctionName": "SetOOM", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Set the error message to a special value for memory allocation failure.

\n" - } - ] - }, - { - "filename": "graph.h", - "ignore": true, - "jsClassName": "Graph", - "cppClassName": "Graph", - "cType": "git_graph", - "freeFunctionName": "git_graph_free", - "functions": [ - { - "cFunctionName": "git_graph_ahead_behind", - "args": [ - { - "name": "ahead", - "cType": "size_t *", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "number of unique from commits in `upstream`" - }, - { - "name": "behind", - "cType": "size_t *", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "number of unique from commits in `local`" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "the repository where the commits exist" - }, - { - "name": "local", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the commit for local" - }, - { - "name": "upstream", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the commit for upstream" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "aheadBehind", - "cppFunctionName": "AheadBehind", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Count the number of unique commits between two commit objects

\n" - } - ] - }, - { - "filename": "ignore.h", - "ignore": true, - "jsClassName": "Ignore", - "cppClassName": "Ignore", - "cType": "git_ignore", - "freeFunctionName": "git_ignore_free", - "functions": [ - { - "cFunctionName": "git_ignore_add_rule", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository to add ignore rules to." - }, - { - "name": "rules", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Text of rules, a la the contents of a .gitignore file. It is okay to have multiple rules in the text; if so, each rule should be terminated with a newline." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "addRule", - "cppFunctionName": "AddRule", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success", - "jsClassName": "Number" - }, - "description": "

Add ignore rules for a repository.

\n" - }, - { - "cFunctionName": "git_ignore_clear_internal_rules", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository to remove ignore rules from." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "clearInternalRules", - "cppFunctionName": "ClearInternalRules", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success", - "jsClassName": "Number" - }, - "description": "

Clear ignore rules that were explicitly added.

\n" - }, - { - "cFunctionName": "git_ignore_path_is_ignored", - "args": [ - { - "name": "ignored", - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "boolean returning 0 if the file is not ignored, 1 if it is" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "a repository object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the file to check ignores for, relative to the repo's workdir." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "pathIsIgnored", - "cppFunctionName": "PathIsIgnored", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if ignore rules could be processed for the file (regardless of whether it exists or not), or an error < 0 if they could not.", - "jsClassName": "Number" - }, - "description": "

Test if the ignore rules apply to a given path.

\n" - } - ] - }, - { - "filename": "index_time.h", - "jsClassName": "IndexTime", - "cppClassName": "GitIndexTime", - "cType": "git_index_time", - "freeFunctionName": "free", - "fields": [ - { - "jsFunctionName": "seconds", - "cppFunctionName": "Seconds", - "name": "seconds", - "cType": "git_time_t", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - { - "jsFunctionName": "nanoseconds", - "cppFunctionName": "Nanoseconds", - "name": "nanoseconds", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number" - } - ] - }, - { - "filename": "index_entry.h", - "dependencies": [ - "../include/index_time.h", - "../include/oid.h" - ], - "jsClassName": "IndexEntry", - "cppClassName": "GitIndexEntry", - "cType": "git_index_entry", - "freeFunctionName": "free", - "fields": [ - { - "jsFunctionName": "ctime", - "cppFunctionName": "Ctime", - "name": "ctime", - "cType": "git_index_time", - "cppClassName": "GitIndexTime", - "jsClassName": "IndexTime", - "copy": "git_index_time_dup" - }, - { - "jsFunctionName": "mtime", - "cppFunctionName": "Mtime", - "name": "mtime", - "cType": "git_index_time", - "cppClassName": "GitIndexTime", - "jsClassName": "IndexTime", - "copy": "git_index_time_dup" - }, - { - "jsFunctionName": "dev", - "cppFunctionName": "Dev", - "name": "dev", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - { - "jsFunctionName": "ino", - "cppFunctionName": "Ino", - "name": "ino", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - { - "jsFunctionName": "mode", - "cppFunctionName": "Mode", - "name": "mode", - "cType": "uint16_t", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "uid", - "cppFunctionName": "Uid", - "name": "uid", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - { - "jsFunctionName": "gid", - "cppFunctionName": "gid", - "name": "gid", - "cType": "unsigned int", - "cppClassName": 
"Uint32", - "jsClassName": "Number" - }, - { - "jsFunctionName": "file_size", - "cppFunctionName": "FileSize", - "name": "file_size", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - { - "jsFunctionName": "oid", - "cppFunctionName": "Oid", - "name": "oid", - "cType": "git_oid", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "copy": "git_oid_dup" - }, - { - "jsFunctionName": "flags", - "cppFunctionName": "Flags", - "name": "flags", - "cType": "uint16_t", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "flags_extended", - "cppFunctionName": "FlagsExtended", - "name": "flags_extended", - "cType": "uint16_t", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "path", - "cppFunctionName": "Path", - "name": "path", - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String" - } - ] - }, - { - "filename": "index.h", - "dependencies": [ - "../include/oid.h", - "../include/repo.h", - "../include/tree.h", - "../include/diff_list.h", - "../include/diff_options.h", - "../include/index_entry.h" - ], - "jsClassName": "Index", - "cppClassName": "GitIndex", - "cType": "git_index", - "freeFunctionName": "git_index_free", - "functions": [ - { - "cFunctionName": "git_index_open", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_index **", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "comment": "the pointer for the new index" - }, - { - "name": "index_path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the path to the index file in disk" - } - ], - "isAsync": true, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "open", - "cppFunctionName": "Open", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new bare Git index object as a memory representation\nof the Git index file in 'index_path', without a repository\nto back it.

\n" - }, - { - "cFunctionName": "git_index_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_index **", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "comment": "the pointer for the new index" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "new", - "cppFunctionName": "New", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create an in-memory index object.

\n" - }, - { - "cFunctionName": "git_index_free", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "comment": "an existing index object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free an existing index object.

\n" - }, - { - "cFunctionName": "git_index_owner", - "args": [ - { - "name": "index", - "cType": "const git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "The index" - } - ], - "ignore": "Never make public for memory allocation reasons", - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "owner", - "cppFunctionName": "Owner", - "return": { - "cType": "git_repository *", - "cppClassName": "GitRepo", - "comment": "A pointer to the repository", - "jsClassName": "Repository" - }, - "description": "

Get the repository this index relates to

\n" - }, - { - "cFunctionName": "git_index_caps", - "args": [ - { - "name": "index", - "cType": "const git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "An existing index object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "caps", - "cppFunctionName": "Caps", - "return": { - "cType": "unsigned int", - "cppClassName": "Uint32", - "comment": "A combination of GIT_INDEXCAP values", - "jsClassName": "Number" - }, - "description": "

Read index capabilities flags.

\n" - }, - { - "cFunctionName": "git_index_set_caps", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "An existing index object" - }, - { - "name": "caps", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "A combination of GIT_INDEXCAP values" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setCaps", - "cppFunctionName": "SetCaps", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, -1 on failure", - "jsClassName": "Number" - }, - "description": "

Set index capabilities flags.

\n" - }, - { - "cFunctionName": "git_index_read", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "read", - "cppFunctionName": "Read", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Update the contents of an existing index object in memory\nby reading from the hard disk.

\n" - }, - { - "cFunctionName": "git_index_write", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "write", - "cppFunctionName": "Write", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Write an existing index object from memory back to disk\nusing an atomic file lock.

\n" - }, - { - "cFunctionName": "git_index_read_tree", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "tree", - "cType": "const git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "comment": "tree to read" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "readTree", - "cppFunctionName": "ReadTree", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Read a tree into the index file with stats

\n" - }, - { - "cFunctionName": "git_index_write_tree", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "Pointer where to store the OID of the written tree" - }, - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "Index to write" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "writeTree", - "cppFunctionName": "WriteTree", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUNMERGED when the index is not clean or an error code", - "jsClassName": "Number" - }, - "description": "

Write the index as a tree

\n" - }, - { - "cFunctionName": "git_index_write_tree_to", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "Pointer where to store OID of the the written tree" - }, - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "comment": "Index to write" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository where to write the tree" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "writeTreeTo", - "cppFunctionName": "WriteTreeTo", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUNMERGED when the index is not clean or an error code", - "jsClassName": "Number" - }, - "description": "

Write the index as a tree to the given repository

\n" - }, - { - "cFunctionName": "git_index_entrycount", - "args": [ - { - "name": "index", - "cType": "const git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "size", - "cppFunctionName": "Size", - "return": { - "cType": "size_t", - "cppClassName": "Uint32", - "comment": "integer of count of current entries", - "jsClassName": "Number" - }, - "description": "

Get the count of entries currently in the index

\n" - }, - { - "cFunctionName": "git_index_clear", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "clear", - "cppFunctionName": "Clear", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Clear the contents (all the entries) of an index object.\nThis clears the index object in memory; changes must be manually\nwritten to disk for them to take effect.

\n" - }, - { - "cFunctionName": "git_index_get_byindex", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "n", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the position of the entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "entry", - "cppFunctionName": "Entry", - "return": { - "cType": "const git_index_entry *", - "cppClassName": "GitIndexEntry", - "copy": "git_index_entry_dup", - "comment": "a pointer to the entry; NULL if out of bounds", - "jsClassName": "IndexEntry" - }, - "description": "

Get a pointer to one of the entries in the index

\n" - }, - { - "cFunctionName": "git_index_get_bypath", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "path to search" - }, - { - "name": "stage", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "stage to search" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getEntry", - "cppFunctionName": "GetEntry", - "return": { - "cType": "const git_index_entry *", - "cppClassName": "GitIndexEntry", - "copy": "git_index_entry_dup", - "comment": "a pointer to the entry; NULL if it was not found", - "jsClassName": "IndexEntry" - }, - "description": "

Get a pointer to one of the entries in the index

\n" - }, - { - "cFunctionName": "git_index_remove", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "path to search" - }, - { - "name": "stage", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "stage to search" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "remove", - "cppFunctionName": "Remove", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Remove an entry from the index

\n" - }, - { - "cFunctionName": "git_index_remove_directory", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "dir", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "container directory path" - }, - { - "name": "stage", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "stage to search" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "removeDirectory", - "cppFunctionName": "RemoveDirectory", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Remove all entries from the index under a given directory

\n" - }, - { - "cFunctionName": "git_index_add", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "source_entry", - "cType": "const git_index_entry *", - "cppClassName": "GitIndexEntry", - "jsClassName": "IndexEntry", - "comment": "new entry object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "add", - "cppFunctionName": "Add", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Add or update an index entry from an in-memory struct

\n" - }, - { - "cFunctionName": "git_index_entry_stage", - "args": [ - { - "name": "entry", - "cType": "const git_index_entry *", - "cppClassName": "GitIndexEntry", - "jsClassName": "IndexEntry", - "comment": "The entry" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "entryStage", - "cppFunctionName": "EntryStage", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Return the stage number from a git index entry

\n" - }, - { - "cFunctionName": "git_index_add_bypath", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "filename to add" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addByPath", - "cppFunctionName": "AddBypath", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Add or update an index entry from a file on disk

\n" - }, - { - "cFunctionName": "git_index_remove_bypath", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "filename to remove" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "removeByPath", - "cppFunctionName": "RemoveBypath", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Remove an index entry corresponding to a file on disk

\n" - }, - { - "cFunctionName": "git_index_find", - "args": [ - { - "name": "at_pos", - "isReturn": true, - "cType": "size_t *", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the address to which the position of the index entry is written (optional)" - }, - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "path to search" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "find", - "cppFunctionName": "Find", - "return": { - "cType": "int", - "cppClassName": "Int32", - "comment": "a zero-based position in the index if found; GIT_ENOTFOUND otherwise", - "jsClassName": "Number" - }, - "description": "

Find the first position of any entries which point to given\npath in the Git index.

\n" - }, - { - "cFunctionName": "git_index_conflict_add", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "ancestor_entry", - "cType": "const git_index_entry *", - "cppClassName": "GitIndexEntry", - "jsClassName": "IndexEntry", - "comment": "the entry data for the ancestor of the conflict" - }, - { - "name": "our_entry", - "cType": "const git_index_entry *", - "cppClassName": "GitIndexEntry", - "jsClassName": "IndexEntry", - "comment": "the entry data for our side of the merge conflict" - }, - { - "name": "their_entry", - "cType": "const git_index_entry *", - "cppClassName": "GitIndexEntry", - "jsClassName": "IndexEntry", - "comment": "the entry data for their side of the merge conflict" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "conflictAdd", - "cppFunctionName": "ConflictAdd", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Add or update index entries to represent a conflict

\n" - }, - { - "cFunctionName": "git_index_conflict_get", - "args": [ - { - "name": "ancestor_out", - "cType": "git_index_entry **", - "cppClassName": "GitIndexEntry", - "jsClassName": "IndexEntry", - "comment": "Pointer to store the ancestor entry" - }, - { - "name": "our_out", - "cType": "git_index_entry **", - "cppClassName": "GitIndexEntry", - "jsClassName": "IndexEntry", - "comment": "Pointer to store the our entry" - }, - { - "name": "their_out", - "cType": "git_index_entry **", - "cppClassName": "GitIndexEntry", - "jsClassName": "IndexEntry", - "comment": "Pointer to store the their entry" - }, - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "comment": "an existing index object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "path to search" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "conflictGet", - "cppFunctionName": "ConflictGet", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Get the index entries that represent a conflict of a single file.

\n" - }, - { - "cFunctionName": "git_index_conflict_remove", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "to search" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "conflictRemove", - "cppFunctionName": "ConflictRemove", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Removes the index entries that represent a conflict of a single file.

\n" - }, - { - "cFunctionName": "git_index_conflict_cleanup", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "conflictCleanup", - "cppFunctionName": "ConflictCleanup", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Remove all conflicts in the index (entries with a stage greater than 0.)

\n" - }, - { - "cFunctionName": "git_index_has_conflicts", - "args": [ - { - "name": "index", - "cType": "const git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "hasConflicts", - "cppFunctionName": "HasConflicts", - "return": { - "cType": "int", - "cppClassName": "Int32", - "comment": "1 if at least one conflict is found, 0 otherwise.", - "jsClassName": "Number" - }, - "description": "

Determine if the index contains entries representing file conflicts.

\n" - }, - { - "cFunctionName": "git_index_reuc_entrycount", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "reucEntrycount", - "cppFunctionName": "ReucEntrycount", - "return": { - "cType": "unsigned int", - "cppClassName": "Uint32", - "comment": "integer of count of current resolve undo entries", - "jsClassName": "Number" - }, - "description": "

Get the count of resolve undo entries currently in the index.

\n" - }, - { - "cFunctionName": "git_index_reuc_find", - "args": [ - { - "name": "at_pos", - "cType": "size_t *", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the address to which the position of the reuc entry is written (optional)" - }, - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "comment": "an existing index object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "path to search" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "reucFind", - "cppFunctionName": "ReucFind", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if found, < 0 otherwise (GIT_ENOTFOUND)", - "jsClassName": "Number" - }, - "description": "

Finds the resolve undo entry that points to the given path in the Git\nindex.

\n" - }, - { - "cFunctionName": "git_index_reuc_get_bypath", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "path to search" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "reucGetBypath", - "cppFunctionName": "ReucGetBypath", - "return": { - "cType": "const git_index_reuc_entry *", - "cppClassName": "IndexReucEntry", - "copy": "fixme", - "comment": "the resolve undo entry; NULL if not found" - }, - "description": "

Get a resolve undo entry from the index.

\n" - }, - { - "cFunctionName": "git_index_reuc_get_byindex", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "n", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the position of the entry" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "reucGetByindex", - "cppFunctionName": "ReucGetByindex", - "return": { - "cType": "const git_index_reuc_entry *", - "cppClassName": "IndexReucEntry", - "copy": "fixme", - "comment": "a pointer to the resolve undo entry; NULL if out of bounds" - }, - "description": "

Get a resolve undo entry from the index.

\n" - }, - { - "cFunctionName": "git_index_reuc_add", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "filename to add" - }, - { - "name": "ancestor_mode", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "mode of the ancestor file" - }, - { - "name": "ancestor_id", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "oid of the ancestor file" - }, - { - "name": "our_mode", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "mode of our file" - }, - { - "name": "our_id", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "oid of our file" - }, - { - "name": "their_mode", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "mode of their file" - }, - { - "name": "their_id", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "oid of their file" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "reucAdd", - "cppFunctionName": "ReucAdd", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Adds a resolve undo entry for a file based on the given parameters.

\n" - }, - { - "cFunctionName": "git_index_reuc_remove", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - }, - { - "name": "n", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "position of the resolve undo entry to remove" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "reucRemove", - "cppFunctionName": "ReucRemove", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Remove a resolve undo entry from the index

\n" - }, - { - "cFunctionName": "git_index_reuc_clear", - "args": [ - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isSelf": true, - "comment": "an existing index object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "reucClear", - "cppFunctionName": "ReucClear", - "return": { - "cType": "void", - "cppClassName": "void", - "comment": "0 or an error code", - "jsClassName": "void" - }, - "description": "

Remove all resolve undo entries from the index

\n" - }, - { - "cFunctionName": "git_diff_index_to_workdir", - "args": [ - { - "name": "diff", - "cType": "git_diff_list **", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "isReturn": true, - "comment": "Output pointer to a git_diff_list pointer to be allocated." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository." - }, - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isOptional": true, - "comment": "The index to diff from; repo index used if NULL." - }, - { - "name": "opts", - "cType": "const git_diff_options *", - "cppClassName": "GitDiffOptions", - "jsClassName": "DiffOptions", - "isOptional": true, - "comment": "Structure with options to influence diff or NULL for defaults." - } - ], - "isAsync": true, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "indexToWorkdir", - "cppFunctionName": "IndexToWorkdir", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Create a diff list between the repository index and the workdir directory.

\n" - } - ] - }, - { - "filename": "indexer.h", - "ignore": true, - "jsClassName": "Indexer", - "cppClassName": "Indexer", - "cType": "git_indexer", - "freeFunctionName": "git_indexer_free", - "functions": [ - { - "cFunctionName": "git_indexer_stream_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_indexer_stream **", - "cppClassName": "IndexerStream", - "jsClassName": "IndexerStream", - "comment": "where to store the indexer instance" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "to the directory where the packfile should be stored" - }, - { - "name": "progress_cb", - "cType": "git_transfer_progress_callback", - "cppClassName": "TransferProgressCallback", - "jsClassName": "TransferProgressCallback", - "comment": "function to call with progress information" - }, - { - "name": "progress_cb_payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "payload for the progress callback" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "streamNew", - "cppFunctionName": "StreamNew", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Create a new streaming indexer instance

\n" - }, - { - "cFunctionName": "git_indexer_stream_add", - "args": [ - { - "name": "idx", - "cType": "git_indexer_stream *", - "cppClassName": "IndexerStream", - "jsClassName": "IndexerStream", - "comment": "the indexer" - }, - { - "name": "data", - "cType": "const void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "the data to add" - }, - { - "name": "size", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the size of the data in bytes" - }, - { - "name": "stats", - "cType": "git_transfer_progress *", - "cppClassName": "TransferProgress", - "jsClassName": "TransferProgress", - "comment": "stat storage" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "streamAdd", - "cppFunctionName": "StreamAdd", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Add data to the indexer

\n" - }, - { - "cFunctionName": "git_indexer_stream_finalize", - "args": [ - { - "name": "idx", - "cType": "git_indexer_stream *", - "cppClassName": "IndexerStream", - "jsClassName": "IndexerStream", - "comment": "the indexer" - }, - { - "name": "stats", - "cType": "git_transfer_progress *", - "cppClassName": "TransferProgress", - "jsClassName": "TransferProgress" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "streamFinalize", - "cppFunctionName": "StreamFinalize", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Finalize the pack and index

\n" - }, - { - "cFunctionName": "git_indexer_stream_hash", - "args": [ - { - "name": "idx", - "cType": "const git_indexer_stream *", - "cppClassName": "IndexerStream", - "jsClassName": "IndexerStream", - "comment": "the indexer instance" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "streamHash", - "cppFunctionName": "StreamHash", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "jsClassName": "Oid" - }, - "description": "

Get the packfile's hash

\n" - }, - { - "cFunctionName": "git_indexer_stream_free", - "args": [ - { - "name": "idx", - "cType": "git_indexer_stream *", - "cppClassName": "IndexerStream", - "jsClassName": "IndexerStream", - "comment": "the indexer to free" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "streamFree", - "cppFunctionName": "StreamFree", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free the indexer and its resources

\n" - } - ] - }, - { - "filename": "inttypes.h", - "ignore": true, - "jsClassName": "Inttypes", - "cppClassName": "Inttypes", - "cType": "git_inttypes", - "freeFunctionName": "git_inttypes_free", - "functions": [] - }, - { - "filename": "merge.h", - "ignore": true, - "jsClassName": "Merge", - "cppClassName": "Merge", - "cType": "git_merge", - "freeFunctionName": "git_merge_free", - "functions": [ - { - "cFunctionName": "git_merge_base", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "the OID of a merge base between 'one' and 'two'" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "the repository where the commits exist" - }, - { - "name": "one", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "one of the commits" - }, - { - "name": "two", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the other commit" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "base", - "cppFunctionName": "Base", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "Zero on success; GIT_ENOTFOUND or -1 on failure.", - "jsClassName": "Number" - }, - "description": "

Find a merge base between two commits

\n" - }, - { - "cFunctionName": "git_merge_base_many", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "the OID of a merge base considering all the commits" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "the repository where the commits exist" - }, - { - "name": "length", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "The number of commits in the provided `input_array`" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "baseMany", - "cppFunctionName": "BaseMany", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "Zero on success; GIT_ENOTFOUND or -1 on failure.", - "jsClassName": "Number" - }, - "description": "

Find a merge base given a list of commits

\n" - } - ] - }, - { - "filename": "message.h", - "ignore": true, - "jsClassName": "Message", - "cppClassName": "Message", - "cType": "git_message", - "freeFunctionName": "git_message_free", - "functions": [ - { - "cFunctionName": "git_message_prettify", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The user-allocated buffer which will be filled with the cleaned up message. Pass NULL if you just want to get the needed size of the prettified message as the output value." - }, - { - "name": "out_size", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Size of the `out` buffer in bytes." - }, - { - "name": "message", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The message to be prettified." - }, - { - "name": "strip_comments", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Non-zero to remove lines starting with \"#\", 0 to leave them in." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "prettify", - "cppFunctionName": "Prettify", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "-1 on error, else number of characters in prettified message including the trailing NUL byte", - "jsClassName": "Number" - }, - "description": "

Clean up message from excess whitespace and make sure that the last line\nends with a '\\n'.

\n" - } - ] - }, - { - "filename": "net.h", - "ignore": true, - "jsClassName": "Net", - "cppClassName": "Net", - "cType": "git_net", - "freeFunctionName": "git_net_free", - "functions": [] - }, - { - "filename": "notes.h", - "ignore": true, - "jsClassName": "Notes", - "cppClassName": "Notes", - "cType": "git_notes", - "freeFunctionName": "git_notes_free", - "functions": [ - { - "cFunctionName": "git_note_iterator_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_note_iterator **", - "cppClassName": "NoteIterator", - "jsClassName": "NoteIterator", - "comment": "pointer to the iterator" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "repository where to look up the note" - }, - { - "name": "notes_ref", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "canonical name of the reference to use (optional); defaults to \"refs/notes/commits\"" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitNoteIteratorNew", - "cppFunctionName": "GitNoteIteratorNew", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Creates a new iterator for notes

\n" - }, - { - "cFunctionName": "git_note_iterator_free", - "args": [ - { - "name": "it", - "cType": "git_note_iterator *", - "cppClassName": "NoteIterator", - "jsClassName": "NoteIterator", - "comment": "pointer to the iterator" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "gitNoteIteratorFree", - "cppFunctionName": "GitNoteIteratorFree", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Frees a git_note_iterator

\n" - }, - { - "cFunctionName": "git_note_next", - "args": [ - { - "name": "note_id", - "cType": "git_oid*", - "cppClassName": "Oid*", - "jsClassName": "Oid*", - "comment": "id of blob containing the message" - }, - { - "name": "annotated_id", - "cType": "git_oid*", - "cppClassName": "Oid*", - "jsClassName": "Oid*", - "comment": "id of the git object being annotated" - }, - { - "name": "it", - "cType": "git_note_iterator *", - "cppClassName": "NoteIterator", - "jsClassName": "NoteIterator", - "comment": "pointer to the iterator" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitNoteNext", - "cppFunctionName": "GitNoteNext", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 (no error), GIT_ITEROVER (iteration is done) or an error code (negative value)", - "jsClassName": "Number" - }, - "description": "

Returns the current item (note_id and annotated_id) and advances the iterator\ninternally to the next value

\n" - }, - { - "cFunctionName": "git_note_read", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_note **", - "cppClassName": "Note", - "jsClassName": "Note", - "comment": "pointer to the read note; NULL in case of error" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "repository where to look up the note" - }, - { - "name": "notes_ref", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "canonical name of the reference to use (optional); defaults to \"refs/notes/commits\"" - }, - { - "name": "oid", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "OID of the git object to read the note from" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitNoteRead", - "cppFunctionName": "GitNoteRead", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Read the note for an object

\n" - }, - { - "cFunctionName": "git_note_message", - "args": [ - { - "name": "note", - "cType": "const git_note *", - "cppClassName": "Note", - "jsClassName": "Note", - "comment": "" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitNoteMessage", - "cppFunctionName": "GitNoteMessage", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "the note message", - "jsClassName": "String" - }, - "description": "

Get the note message

\n" - }, - { - "cFunctionName": "git_note_oid", - "args": [ - { - "name": "note", - "cType": "const git_note *", - "cppClassName": "Note", - "jsClassName": "Note", - "comment": "" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitNoteOid", - "cppFunctionName": "GitNoteOid", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "the note object OID", - "jsClassName": "Oid" - }, - "description": "

Get the note object OID

\n" - }, - { - "cFunctionName": "git_note_create", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "pointer to store the OID (optional); NULL in case of error" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "repository where to store the note" - }, - { - "name": "author", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "signature of the notes commit author" - }, - { - "name": "committer", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "signature of the notes commit committer" - }, - { - "name": "notes_ref", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "canonical name of the reference to use (optional); defaults to \"refs/notes/commits\"" - }, - { - "name": "oid", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "OID of the git object to decorate" - }, - { - "name": "note", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "s_ref canonical name of the reference to use (optional); defaults to \"refs/notes/commits\"" - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Overwrite existing note" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitNoteCreate", - "cppFunctionName": "GitNoteCreate", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Add a note for an object

\n" - }, - { - "cFunctionName": "git_note_remove", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "repository where the note lives" - }, - { - "name": "notes_ref", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "canonical name of the reference to use (optional); defaults to \"refs/notes/commits\"" - }, - { - "name": "author", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "signature of the notes commit author" - }, - { - "name": "committer", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "signature of the notes commit committer" - }, - { - "name": "oid", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "OID of the git object to remove the note from" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitNoteRemove", - "cppFunctionName": "GitNoteRemove", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Remove the note for an object

\n" - }, - { - "cFunctionName": "git_note_free", - "args": [ - { - "name": "note", - "cType": "git_note *", - "cppClassName": "Note", - "jsClassName": "Note", - "comment": "git_note object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "gitNoteFree", - "cppFunctionName": "GitNoteFree", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free a git_note object

\n" - }, - { - "cFunctionName": "git_note_default_ref", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "const char **", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Pointer to the default notes reference" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The Git repository" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitNoteDefaultRef", - "cppFunctionName": "GitNoteDefaultRef", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Get the default notes reference for a repository

\n" - }, - { - "cFunctionName": "git_note_foreach", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository where to find the notes." - }, - { - "name": "notes_ref", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Reference to read from (optional); defaults to \"refs/notes/commits\"." - }, - { - "name": "note_cb", - "cType": "git_note_foreach_cb", - "cppClassName": "NoteForeachCb", - "jsClassName": "NoteForeachCb", - "comment": "Callback to invoke per found annotation. Return non-zero to stop looping." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Extra parameter to callback function." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitNoteForeach", - "cppFunctionName": "GitNoteForeach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Loop over all the notes within a specified namespace\nand issue a callback for each one.

\n" - } - ] - }, - { - "filename": "object.h", - "dependencies": [ - "../include/oid.h", - "../include/repo.h" - ], - "jsClassName": "Object", - "cppClassName": "GitObject", - "cType": "git_object", - "freeFunctionName": "git_object_free", - "functions": [ - { - "cFunctionName": "git_object_id", - "args": [ - { - "name": "obj", - "cType": "const git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isSelf": true, - "comment": "the repository object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "oid", - "cppFunctionName": "Oid", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "the SHA1 id", - "jsClassName": "Oid" - }, - "description": "

Get the id (SHA1) of a repository object

\n" - }, - { - "cFunctionName": "git_object_type", - "args": [ - { - "name": "obj", - "cType": "const git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isSelf": true, - "comment": "the repository object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "type", - "cppFunctionName": "Type", - "return": { - "cType": "git_otype", - "cppClassName": "Number", - "comment": "the object's type", - "jsClassName": "Number" - }, - "description": "

Get the object type of an object

\n" - }, - { - "cFunctionName": "git_object_owner", - "args": [ - { - "name": "obj", - "cType": "const git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isSelf": true, - "comment": "the object" - } - ], - "ignore": "Never make public for memory allocation reasons", - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "owner", - "cppFunctionName": "Owner", - "return": { - "cType": "git_repository *", - "cppClassName": "GitRepo", - "comment": "the repository who owns this object", - "jsClassName": "Repository" - }, - "description": "

Get the repository that owns this object

\n" - }, - { - "cFunctionName": "git_object_free", - "args": [ - { - "name": "object", - "cType": "git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "comment": "the object to close" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Close an open object

\n" - }, - { - "cFunctionName": "git_object_type2string", - "args": [ - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Number", - "jsClassName": "Number", - "comment": "object type to convert." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "type2string", - "cppFunctionName": "Type2string", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "the corresponding string representation.", - "jsClassName": "String" - }, - "description": "

Convert an object type to its string representation.

\n" - }, - { - "cFunctionName": "git_object_string2type", - "args": [ - { - "name": "str", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the string to convert." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "string2type", - "cppFunctionName": "String2type", - "return": { - "cType": "git_otype", - "cppClassName": "Number", - "comment": "the corresponding git_otype.", - "jsClassName": "Number" - }, - "description": "

Convert a string object type representation to it's git_otype.

\n" - }, - { - "cFunctionName": "git_object_typeisloose", - "args": [ - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Number", - "jsClassName": "Number", - "comment": "object type to test." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "typeisloose", - "cppFunctionName": "Typeisloose", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "true if the type represents a valid loose object type, false otherwise.", - "jsClassName": "Number" - }, - "description": "

Determine if the given git_otype is a valid loose object type.

\n" - }, - { - "cFunctionName": "git_object__size", - "args": [ - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "object type to get its size" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "Size", - "cppFunctionName": "Size", - "return": { - "cType": "size_t", - "cppClassName": "Uint32", - "comment": "size in bytes of the object", - "jsClassName": "Number" - }, - "description": "

Get the size in bytes for the structure which\nacts as an in-memory representation of any given\nobject type.

\n" - }, - { - "cFunctionName": "git_object_peel", - "args": [ - { - "name": "peeled", - "cType": "git_object **", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isReturn": true, - "comment": "Pointer to the peeled git_object" - }, - { - "name": "object", - "cType": "const git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isSelf": true, - "comment": "The object to be processed" - }, - { - "name": "target_type", - "cType": "git_otype", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "The type of the requested object (GIT_OBJ_COMMIT, GIT_OBJ_TAG, GIT_OBJ_TREE, GIT_OBJ_BLOB or GIT_OBJ_ANY)." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "peel", - "cppFunctionName": "Peel", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EAMBIGUOUS, GIT_ENOTFOUND or an error code", - "jsClassName": "Number" - }, - "description": "

Recursively peel an object until an object of the specified type is met.

\n" - }, - { - "cFunctionName": "git_object_dup", - "args": [ - { - "name": "dest", - "cType": "git_object **", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isReturn": true, - "comment": "Pointer to store the copy of the object" - }, - { - "name": "source", - "cType": "git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "ignore": true, - "isSelf": true, - "comment": "Original object to copy" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "dup", - "cppFunctionName": "Dup", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Create an in-memory copy of a Git object. The copy must be\nexplicitly free'd or it will leak.

\n" - } - ] - }, - { - "filename": "odb.h", - "dependencies": [ - "../include/oid.h", - "../include/odb_object.h", - "node_buffer.h" - ], - "jsClassName": "Odb", - "cppClassName": "GitOdb", - "cType": "git_odb", - "freeFunctionName": "git_odb_free", - "functions": [ - { - "cFunctionName": "git_odb_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_odb **", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "location to store the database pointer, if opened. Set to NULL if the open failed." - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "create()", - "cppFunctionName": "Create", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new object database with no backends.

\n" - }, - { - "cFunctionName": "git_odb_open", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_odb **", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "location to store the database pointer, if opened. Set to NULL if the open failed." - }, - { - "name": "objects_dir", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "path of the backends' \"objects\" directory." - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "open", - "cppFunctionName": "Open", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new object database and automatically add\nthe two default backends:

\n" - }, - { - "cFunctionName": "git_odb_add_backend", - "args": [ - { - "name": "odb", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "isSelf": true, - "comment": "database to add the backend to" - }, - { - "name": "backend", - "cType": "git_odb_backend *", - "cppClassName": "OdbBackend", - "jsClassName": "OdbBackend", - "comment": "pointer to a git_odb_backend instance" - }, - { - "name": "priority", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Value for ordering the backends queue" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addBackend", - "cppFunctionName": "AddBackend", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; error code otherwise", - "jsClassName": "Number" - }, - "description": "

Add a custom backend to an existing Object DB

\n" - }, - { - "cFunctionName": "git_odb_add_alternate", - "args": [ - { - "name": "odb", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "isSelf": true, - "comment": "database to add the backend to" - }, - { - "name": "backend", - "cType": "git_odb_backend *", - "cppClassName": "OdbBackend", - "jsClassName": "OdbBackend", - "comment": "pointer to a git_odb_backend instance" - }, - { - "name": "priority", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Value for ordering the backends queue" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addAlternate", - "cppFunctionName": "AddAlternate", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; error code otherwise", - "jsClassName": "Number" - }, - "description": "

Add a custom backend to an existing Object DB; this\nbackend will work as an alternate.

\n" - }, - { - "cFunctionName": "git_odb_add_disk_alternate", - "args": [ - { - "name": "odb", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "isSelf": true, - "comment": "database to add the backend to" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "path to the objects folder for the alternate" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addDiskAlternate", - "cppFunctionName": "AddDiskAlternate", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; error code otherwise", - "jsClassName": "Number" - }, - "description": "

Add an on-disk alternate to an existing Object DB.

\n" - }, - { - "cFunctionName": "git_odb_free", - "args": [ - { - "name": "db", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "database pointer to close. If NULL no action is taken." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Close an open object database.

\n" - }, - { - "cFunctionName": "git_odb_read", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_odb_object **", - "cppClassName": "GitOdbObject", - "jsClassName": "OdbObject", - "comment": "pointer where to store the read object" - }, - { - "name": "db", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "isSelf": true, - "comment": "database to search for the object in." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "identity of the object to read." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "read", - "cppFunctionName": "Read", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "- 0 if the object was read; - GIT_ENOTFOUND if the object is not in the database.", - "jsClassName": "Number" - }, - "description": "

Read an object from the database.

\n" - }, - { - "cFunctionName": "git_odb_read_prefix", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_odb_object **", - "cppClassName": "GitOdbObject", - "jsClassName": "OdbObject", - "comment": "pointer where to store the read object" - }, - { - "name": "db", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "database to search for the object in." - }, - { - "name": "short_id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "a prefix of the id of the object to read." - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the length of the prefix" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "readPrefix", - "cppFunctionName": "ReadPrefix", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "- 0 if the object was read; - GIT_ENOTFOUND if the object is not in the database. - GIT_EAMBIGUOUS if the prefix is ambiguous (several objects match the prefix)", - "jsClassName": "Number" - }, - "description": "

Read an object from the database, given a prefix\nof its identifier.

\n" - }, - { - "cFunctionName": "git_odb_read_header", - "args": [ - { - "name": "len_out", - "isReturn": true, - "cType": "size_t *", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "pointer where to store the length" - }, - { - "name": "type_out", - "isReturn": true, - "cType": "git_otype *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "pointer where to store the type" - }, - { - "name": "db", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "database to search for the object in." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "identity of the object to read." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "readHeader", - "cppFunctionName": "ReadHeader", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "- 0 if the object was read; - GIT_ENOTFOUND if the object is not in the database.", - "jsClassName": "Number" - }, - "description": "

Read the header of an object from the database, without\nreading its full contents.

\n" - }, - { - "cFunctionName": "git_odb_exists", - "args": [ - { - "name": "db", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "isSelf": true, - "comment": "database to be searched for the given object." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the object to search for." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "exists", - "cppFunctionName": "Exists", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "- 1, if the object was found - 0, otherwise", - "jsClassName": "Number" - }, - "description": "

Determine if the given object can be found in the object database.

\n" - }, - { - "cFunctionName": "git_odb_refresh", - "args": [ - { - "name": "db", - "cType": "struct git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "isSelf": true, - "comment": "database to refresh" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "refresh", - "cppFunctionName": "Refresh", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, error code otherwise", - "jsClassName": "Number" - }, - "description": "

Refresh the object database to load newly added files.

\n" - }, - { - "cFunctionName": "git_odb_foreach", - "args": [ - { - "name": "db", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "isSelf": true, - "comment": "database to use" - }, - { - "name": "cb", - "cType": "git_odb_foreach_cb", - "cppClassName": "OdbForeachCb", - "jsClassName": "OdbForeachCb", - "comment": "the callback to call for each object" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "data to pass to the callback" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "foreach", - "cppFunctionName": "Foreach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

List all objects available in the database

\n" - }, - { - "cFunctionName": "git_odb_write", - "args": [ - { - "name": "out", - "isReturn": true, - "shouldAlloc": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "pointer to store the OID result of the write" - }, - { - "name": "odb", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "isSelf": true, - "comment": "object database where to store the object" - }, - { - "name": "data", - "cType": "const void *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "buffer with the data to store" - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "size of the buffer" - }, - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "type of the data to store" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "write", - "cppFunctionName": "Write", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Write an object directly into the ODB

\n" - }, - { - "cFunctionName": "git_odb_open_wstream", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_odb_stream **", - "cppClassName": "GitOdbStream", - "jsClassName": "OdbStream", - "comment": "pointer where to store the stream" - }, - { - "name": "db", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "object database where the stream will write" - }, - { - "name": "size", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "final size of the object that will be written" - }, - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "type of the object that will be written" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "openWstream", - "cppFunctionName": "OpenWstream", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if the stream was created; error code otherwise", - "jsClassName": "Number" - }, - "description": "

Open a stream to write an object into the ODB

\n" - }, - { - "cFunctionName": "git_odb_open_rstream", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_odb_stream **", - "cppClassName": "GitOdbStream", - "jsClassName": "OdbStream", - "comment": "pointer where to store the stream" - }, - { - "name": "db", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "object database where the stream will read from" - }, - { - "name": "oid", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "oid of the object the stream will read from" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "openRstream", - "cppFunctionName": "OpenRstream", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if the stream was created; error code otherwise", - "jsClassName": "Number" - }, - "description": "

Open a stream to read an object from the ODB

\n" - }, - { - "cFunctionName": "git_odb_write_pack", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_odb_writepack **", - "cppClassName": "OdbWritepack", - "jsClassName": "OdbWritepack", - "comment": "pointer to the writepack functions" - }, - { - "name": "db", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "object database where the stream will read from" - }, - { - "name": "progress_cb", - "cType": "git_transfer_progress_callback", - "cppClassName": "TransferProgressCallback", - "jsClassName": "TransferProgressCallback", - "comment": "function to call with progress information. Be aware that this is called inline with network and indexing operations, so performance may be affected." - }, - { - "name": "progress_payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "payload for the progress callback" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "writePack", - "cppFunctionName": "WritePack", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Open a stream for writing a pack file to the ODB.

\n" - }, - { - "cFunctionName": "git_odb_hash", - "args": [ - { - "name": "out", - "isReturn": true, - "shouldAlloc": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the resulting object-ID." - }, - { - "name": "data", - "cType": "const void *", - "cppClassName": "Buffer", - "jsClassName": "Buffer", - "comment": "data to hash" - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "size of the data" - }, - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "of the data to hash" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "hash", - "cppFunctionName": "Hash", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Determine the object-ID (sha1 hash) of a data buffer

\n" - }, - { - "cFunctionName": "git_odb_hashfile", - "args": [ - { - "name": "out", - "isReturn": true, - "shouldAlloc": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "oid structure the result is written into." - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "file to read and determine object id for" - }, - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "the type of the object that will be hashed" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "hashfile", - "cppFunctionName": "Hashfile", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Read a file from disk and fill a git_oid with the object id\nthat the file would have if it were written to the Object\nDatabase as an object of the given type (w/o applying filters).\nSimilar functionality to git.git's git hash-object without\nthe -w flag, however, with the --no-filters flag.\nIf you need filters, see git_repository_hashfile.

\n" - } - ] - }, - { - "filename": "odb_object.h", - "dependencies": [ - "../include/wrapper.h", - "../include/oid.h" - ], - "jsClassName": "OdbObject", - "cppClassName": "GitOdbObject", - "cType": "git_odb_object", - "freeFunctionName": "git_odb_object_free", - "functions": [ - { - "cFunctionName": "git_odb_object_data", - "args": [ - { - "name": "object", - "cType": "git_odb_object *", - "cppClassName": "GitOdbObject", - "jsClassName": "OdbObject", - "isSelf": true, - "comment": "the object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "data", - "cppFunctionName": "Data", - "return": { - "cType": "const void *", - "cppClassName": "Wrapper", - "comment": "a pointer to the data" - }, - "description": "

Return the data of an ODB object

\n" - }, - { - "cFunctionName": "git_odb_object_size", - "args": [ - { - "name": "object", - "cType": "git_odb_object *", - "cppClassName": "GitOdbObject", - "jsClassName": "OdbObject", - "isSelf": true, - "comment": "the object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "size", - "cppFunctionName": "Size", - "return": { - "cType": "size_t", - "cppClassName": "Uint32", - "comment": "the size", - "jsClassName": "Number" - }, - "description": "

Return the size of an ODB object

\n" - }, - { - "cFunctionName": "git_odb_object_type", - "args": [ - { - "name": "object", - "cType": "git_odb_object *", - "cppClassName": "GitOdbObject", - "jsClassName": "OdbObject", - "isSelf": true, - "comment": "the object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "type", - "cppFunctionName": "Type", - "return": { - "cType": "git_otype", - "cppClassName": "Int32", - "comment": "the type", - "jsClassName": "Number" - }, - "description": "

Return the type of an ODB object

\n" - }, - { - "cFunctionName": "git_odb_object_free", - "args": [ - { - "name": "object", - "cType": "git_odb_object *", - "cppClassName": "GitOdbObject", - "jsClassName": "OdbObject", - "isSelf": true, - "comment": "object to close" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Close an ODB object

\n" - }, - { - "cFunctionName": "git_odb_object_id", - "args": [ - { - "name": "object", - "cType": "git_odb_object *", - "cppClassName": "GitOdbObject", - "jsClassName": "OdbObject", - "isSelf": true, - "comment": "the object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "oid", - "cppFunctionName": "Oid", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "a pointer to the OID", - "jsClassName": "Oid" - }, - "description": "

Return the OID of an ODB object

\n" - } - ] - }, - { - "filename": "odb_backend.h", - "ignore": true, - "jsClassName": "OdbBackend", - "cppClassName": "OdbBackend", - "cType": "git_odb_backend", - "freeFunctionName": "git_odb_backend_free", - "functions": [ - { - "cFunctionName": "git_odb_backend_pack", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_odb_backend **", - "cppClassName": "OdbBackend", - "jsClassName": "OdbBackend", - "comment": "location to store the odb backend pointer" - }, - { - "name": "objects_dir", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the Git repository's objects directory" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "pack", - "cppFunctionName": "Pack", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a backend for the packfiles.

\n" - } - ] - }, - { - "filename": "oid.h", - "dependencies": [], - "jsClassName": "Oid", - "cppClassName": "GitOid", - "cType": "git_oid", - "freeFunctionName": "free", - "functions": [ - { - "cFunctionName": "git_oid_fromstr", - "args": [ - { - "name": "out", - "isReturn": true, - "shouldAlloc": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "oid structure the result is written into." - }, - { - "name": "str", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "input hex string; must be pointing at the start of the hex sequence and have at least the number of bytes needed for an oid encoded in hex (40 bytes)." - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "fromString", - "cppFunctionName": "FromString", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Parse a hex formatted object id into a git_oid.

\n" - }, - { - "cFunctionName": "git_oid_fromstrp", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "oid structure the result is written into." - }, - { - "name": "str", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "input hex string; must be at least 4 characters long and null-terminated." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "fromstrp", - "cppFunctionName": "Fromstrp", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Parse a hex formatted null-terminated string into a git_oid.

\n" - }, - { - "cFunctionName": "git_oid_fromstrn", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "oid structure the result is written into." - }, - { - "name": "str", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "input hex string of at least size `length`" - }, - { - "name": "length", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "length of the input string" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "fromstrn", - "cppFunctionName": "Fromstrn", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Parse N characters of a hex formatted object id into a git_oid

\n" - }, - { - "cFunctionName": "git_oid_fromraw", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "oid structure the result is written into." - }, - { - "name": "raw", - "cType": "const unsigned char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the raw input bytes to be copied." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "fromraw", - "cppFunctionName": "Fromraw", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Copy an already raw oid into a git_oid structure.

\n" - }, - { - "cFunctionName": "git_oid_fmt", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "output hex string; must be pointing at the start of the hex sequence and have at least the number of bytes needed for an oid encoded in hex (40 bytes). Only the oid digits are written; a '\\\\0' terminator must be added by the caller if it is required." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isSelf": true, - "comment": "oid structure to format." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "fmt", - "cppFunctionName": "Fmt", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Format a git_oid into a hex string.

\n" - }, - { - "cFunctionName": "git_oid_pathfmt", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "output hex string; must be pointing at the start of the hex sequence and have at least the number of bytes needed for an oid encoded in hex (41 bytes). Only the oid digits are written; a '\\\\0' terminator must be added by the caller if it is required." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "oid structure to format." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "pathfmt", - "cppFunctionName": "Pathfmt", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Format a git_oid into a loose-object path string.

\n" - }, - { - "cFunctionName": "git_oid_allocfmt", - "args": [ - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isSelf": true, - "comment": "the oid structure to format" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "sha", - "cppFunctionName": "Sha", - "return": { - "cType": "char *", - "cppClassName": "String", - "comment": "the c-string; NULL if memory is exhausted. Caller must deallocate the string with git__free().", - "jsClassName": "String", - "freeFunctionName": "free" - }, - "description": "

Format a git_oid into a newly allocated c-string.

\n" - }, - { - "cFunctionName": "git_oid_tostr", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the buffer into which the oid string is output." - }, - { - "name": "n", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the size of the out buffer." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the oid structure to format." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "sha", - "cppFunctionName": "Sha", - "return": { - "cType": "char *", - "cppClassName": "String", - "comment": "the out buffer pointer, assuming no input parameter errors, otherwise a pointer to an empty string.", - "jsClassName": "String" - }, - "description": "

Format a git_oid into a buffer as a hex format c-string.

\n" - }, - { - "cFunctionName": "git_oid_cpy", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "oid structure the result is written into." - }, - { - "name": "src", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isSelf": true, - "comment": "oid structure to copy from." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "cpy", - "cppFunctionName": "Cpy", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Copy an oid from one structure to another.

\n" - }, - { - "cFunctionName": "git_oid_cmp", - "args": [ - { - "name": "a", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isSelf": true, - "comment": "first oid structure." - }, - { - "name": "b", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "second oid structure." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "cmp", - "cppFunctionName": "Cmp", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "<0, 0, >0 if a < b, a == b, a > b.", - "jsClassName": "Number" - }, - "description": "

Compare two oid structures.

\n" - }, - { - "cFunctionName": "git_oid_equal", - "args": [ - { - "name": "a", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isSelf": true, - "comment": "first oid structure." - }, - { - "name": "b", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "second oid structure." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "equal", - "cppFunctionName": "Equal", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "true if equal, false otherwise", - "jsClassName": "Number" - }, - "description": "

Compare two oid structures for equality

\n" - }, - { - "cFunctionName": "git_oid_ncmp", - "args": [ - { - "name": "a", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isSelf": true, - "comment": "first oid structure." - }, - { - "name": "b", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "second oid structure." - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the number of hex chars to compare" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "ncmp", - "cppFunctionName": "Ncmp", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 in case of a match", - "jsClassName": "Number" - }, - "description": "

Compare the first 'len' hexadecimal characters (packets of 4 bits)\nof two oid structures.

\n" - }, - { - "cFunctionName": "git_oid_streq", - "args": [ - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isSelf": true, - "comment": "oid structure." - }, - { - "name": "str", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "input hex string of an object id." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "streq", - "cppFunctionName": "Streq", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "GIT_ENOTOID if str is not a valid hex string, 0 in case of a match, GIT_ERROR otherwise.", - "jsClassName": "Number" - }, - "description": "

Check if an oid equals an hex formatted object id.

\n" - }, - { - "cFunctionName": "git_oid_iszero", - "args": [ - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isSelf": true - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "iszero", - "cppFunctionName": "Iszero", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 if all zeros, 0 otherwise.", - "jsClassName": "Number" - }, - "description": "

Check is an oid is all zeros.

\n" - }, - { - "cFunctionName": "git_oid_shorten_new", - "args": [ - { - "name": "min_length", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "The minimal length for all identifiers, which will be used even if shorter OIDs would still be unique." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "shortenNew", - "cppFunctionName": "ShortenNew", - "return": { - "cType": "git_oid_shorten *", - "cppClassName": "OidShorten", - "copy": "fixme", - "comment": "a `git_oid_shorten` instance, NULL if OOM", - "jsClassName": "OidShorten" - }, - "description": "

Create a new OID shortener.

\n" - }, - { - "cFunctionName": "git_oid_shorten_add", - "args": [ - { - "name": "os", - "cType": "git_oid_shorten *", - "cppClassName": "OidShorten", - "jsClassName": "OidShorten", - "comment": "a `git_oid_shorten` instance" - }, - { - "name": "text_id", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "an OID in text form" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "shortenAdd", - "cppFunctionName": "ShortenAdd", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "the minimal length to uniquely identify all OIDs added so far to the set; or an error code (<0) if an error occurs.", - "jsClassName": "Number" - }, - "description": "

Add a new OID to set of shortened OIDs and calculate\nthe minimal length to uniquely identify all the OIDs in\nthe set.

\n" - }, - { - "cFunctionName": "git_oid_shorten_free", - "args": [ - { - "name": "os", - "cType": "git_oid_shorten *", - "cppClassName": "OidShorten", - "jsClassName": "OidShorten", - "comment": "a `git_oid_shorten` instance" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "shortenFree", - "cppFunctionName": "ShortenFree", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free an OID shortener instance

\n" - } - ] - }, - { - "filename": "pack.h", - "ignore": true, - "jsClassName": "Pack", - "cppClassName": "Pack", - "cType": "git_pack", - "freeFunctionName": "git_pack_free", - "functions": [ - { - "cFunctionName": "git_packbuilder_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_packbuilder **", - "cppClassName": "Packbuilder", - "jsClassName": "Packbuilder", - "comment": "The new packbuilder object" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitPackbuilderNew", - "cppFunctionName": "GitPackbuilderNew", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Initialize a new packbuilder

\n" - }, - { - "cFunctionName": "git_packbuilder_set_threads", - "args": [ - { - "name": "pb", - "cType": "git_packbuilder *", - "cppClassName": "Packbuilder", - "jsClassName": "Packbuilder", - "comment": "The packbuilder" - }, - { - "name": "n", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Number of threads to spawn" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitPackbuilderSetThreads", - "cppFunctionName": "GitPackbuilderSetThreads", - "return": { - "cType": "unsigned int", - "cppClassName": "Uint32", - "comment": "number of actual threads to be used", - "jsClassName": "Number" - }, - "description": "

Set number of threads to spawn

\n" - }, - { - "cFunctionName": "git_packbuilder_insert", - "args": [ - { - "name": "pb", - "cType": "git_packbuilder *", - "cppClassName": "Packbuilder", - "jsClassName": "Packbuilder", - "comment": "The packbuilder" - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "The oid of the commit" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The name; might be NULL" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitPackbuilderInsert", - "cppFunctionName": "GitPackbuilderInsert", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Insert a single object

\n" - }, - { - "cFunctionName": "git_packbuilder_insert_tree", - "args": [ - { - "name": "pb", - "cType": "git_packbuilder *", - "cppClassName": "Packbuilder", - "jsClassName": "Packbuilder", - "comment": "The packbuilder" - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "The oid of the root tree" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitPackbuilderInsertTree", - "cppFunctionName": "GitPackbuilderInsertTree", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Insert a root tree object

\n" - }, - { - "cFunctionName": "git_packbuilder_write", - "args": [ - { - "name": "pb", - "cType": "git_packbuilder *", - "cppClassName": "Packbuilder", - "jsClassName": "Packbuilder", - "comment": "The packbuilder" - }, - { - "name": "file", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitPackbuilderWrite", - "cppFunctionName": "GitPackbuilderWrite", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Write the new pack and corresponding index file to path.

\n" - }, - { - "cFunctionName": "git_packbuilder_foreach", - "args": [ - { - "name": "pb", - "cType": "git_packbuilder *", - "cppClassName": "Packbuilder", - "jsClassName": "Packbuilder", - "comment": "the packbuilder" - }, - { - "name": "cb", - "cType": "git_packbuilder_foreach_cb", - "cppClassName": "PackbuilderForeachCb", - "jsClassName": "PackbuilderForeachCb", - "comment": "the callback to call with each packed object's buffer" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "the callback's data" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitPackbuilderForeach", - "cppFunctionName": "GitPackbuilderForeach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create the new pack and pass each object to the callback

\n" - }, - { - "cFunctionName": "git_packbuilder_object_count", - "args": [ - { - "name": "pb", - "cType": "git_packbuilder *", - "cppClassName": "Packbuilder", - "jsClassName": "Packbuilder", - "comment": "the packbuilder" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitPackbuilderObjectCount", - "cppFunctionName": "GitPackbuilderObjectCount", - "return": { - "cType": "uint32_t", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - "description": "

Get the total number of objects the packbuilder will write out

\n" - }, - { - "cFunctionName": "git_packbuilder_written", - "args": [ - { - "name": "pb", - "cType": "git_packbuilder *", - "cppClassName": "Packbuilder", - "jsClassName": "Packbuilder", - "comment": "the packbuilder" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitPackbuilderWritten", - "cppFunctionName": "GitPackbuilderWritten", - "return": { - "cType": "uint32_t", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - "description": "

Get the number of objects the packbuilder has already written out

\n" - }, - { - "cFunctionName": "git_packbuilder_free", - "args": [ - { - "name": "pb", - "cType": "git_packbuilder *", - "cppClassName": "Packbuilder", - "jsClassName": "Packbuilder", - "comment": "The packbuilder" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "gitPackbuilderFree", - "cppFunctionName": "GitPackbuilderFree", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free the packbuilder and all associated data

\n" - } - ] - }, - { - "filename": "push.h", - "ignore": true, - "jsClassName": "Push", - "cppClassName": "Push", - "cType": "git_push", - "freeFunctionName": "git_push_free", - "functions": [ - { - "cFunctionName": "git_push_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_push **", - "cppClassName": "Push", - "jsClassName": "Push", - "comment": "New push object" - }, - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "comment": "Remote instance" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "new", - "cppFunctionName": "New", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new push object

\n" - }, - { - "cFunctionName": "git_push_set_options", - "args": [ - { - "name": "push", - "cType": "git_push *", - "cppClassName": "Push", - "jsClassName": "Push", - "isSelf": true, - "comment": "The push object" - }, - { - "name": "opts", - "cType": "const git_push_options *", - "cppClassName": "PushOptions", - "jsClassName": "PushOptions", - "comment": "The options to set on the push object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setOptions", - "cppFunctionName": "SetOptions", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Set options on a push object

\n" - }, - { - "cFunctionName": "git_push_add_refspec", - "args": [ - { - "name": "push", - "cType": "git_push *", - "cppClassName": "Push", - "jsClassName": "Push", - "isSelf": true, - "comment": "The push object" - }, - { - "name": "refspec", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Refspec string" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addRefspec", - "cppFunctionName": "AddRefspec", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Add a refspec to be pushed

\n" - }, - { - "cFunctionName": "git_push_update_tips", - "args": [ - { - "name": "push", - "cType": "git_push *", - "cppClassName": "Push", - "jsClassName": "Push", - "isSelf": true, - "comment": "The push object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "updateTips", - "cppFunctionName": "UpdateTips", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Update remote tips after a push

\n" - }, - { - "cFunctionName": "git_push_finish", - "args": [ - { - "name": "push", - "cType": "git_push *", - "cppClassName": "Push", - "jsClassName": "Push", - "isSelf": true, - "comment": "The push object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "finish", - "cppFunctionName": "Finish", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Actually push all given refspecs

\n" - }, - { - "cFunctionName": "git_push_unpack_ok", - "args": [ - { - "name": "push", - "cType": "git_push *", - "cppClassName": "Push", - "jsClassName": "Push", - "isSelf": true, - "comment": "The push object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "unpackOk", - "cppFunctionName": "UnpackOk", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "true if equal, false otherwise", - "jsClassName": "Number" - }, - "description": "

Check if remote side successfully unpacked

\n" - }, - { - "cFunctionName": "git_push_status_foreach", - "args": [ - { - "name": "push", - "cType": "git_push *", - "cppClassName": "Push", - "jsClassName": "Push", - "isSelf": true, - "comment": "The push object" - }, - { - "name": "cb", - "cType": "int (*)(const char *ref, const char *msg, void *data)", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "The callback to call on each object" - }, - { - "name": "data", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "statusForeach", - "cppFunctionName": "StatusForeach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Call callback `cb' on each status

\n" - }, - { - "cFunctionName": "git_push_free", - "args": [ - { - "name": "push", - "cType": "git_push *", - "cppClassName": "Push", - "jsClassName": "Push", - "comment": "The push object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free the given push object

\n" - } - ] - }, - { - "filename": "refdb.h", - "jsClassName": "RefDb", - "cppClassName": "GitRefDb", - "cType": "git_refdb", - "note": "this should be git_refdb_free, but it's not available", - "freeFunctionName": "free", - "functions": [ - { - "cFunctionName": "git_reference__alloc", - "args": [ - { - "name": "refdb", - "cType": "git_refdb *", - "cppClassName": "GitRefDb", - "jsClassName": "RefDb", - "isSelf": true - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - }, - { - "name": "oid", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid" - }, - { - "name": "symbolic", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "gitReference_Alloc", - "cppFunctionName": "GitReference_Alloc", - "return": { - "cType": "git_reference *", - "cppClassName": "GitReference", - "copy": "fixme", - "comment": "the created git_reference or NULL on error", - "jsClassName": "Reference" - }, - "description": "

Create a new direct reference from an OID.

\n" - }, - { - "cFunctionName": "git_refdb_compress", - "args": [ - { - "name": "refdb", - "cType": "git_refdb *", - "cppClassName": "GitRefDb", - "jsClassName": "RefDb", - "isSelf": true - } - ], - "ignore": true, - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "compress", - "cppFunctionName": "Compress", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Suggests that the given refdb compress or optimize its references.\nThis mechanism is implementation specific. For on-disk reference\ndatabases, for example, this may pack all loose references.

\n" - }, - { - "cFunctionName": "git_refdb_free", - "args": [ - { - "name": "refdb", - "cType": "git_refdb *", - "cppClassName": "GitRefDb", - "jsClassName": "RefDb", - "comment": "reference database pointer or NULL" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Close an open reference database.

\n" - }, - { - "cFunctionName": "git_refdb_set_backend", - "args": [ - { - "name": "refdb", - "cType": "git_refdb *", - "cppClassName": "GitRefDb", - "jsClassName": "RefDb", - "isSelf": true, - "comment": "database to add the backend to" - }, - { - "name": "backend", - "cType": "git_refdb_backend *", - "cppClassName": "RefdbBackend", - "jsClassName": "RefdbBackend", - "comment": "pointer to a git_refdb_backend instance" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setBackend", - "cppFunctionName": "SetBackend", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; error code otherwise", - "jsClassName": "Number" - }, - "description": "

Sets the custom backend to an existing reference DB

\n" - } - ] - }, - { - "filename": "refdb_backend.h", - "ignore": true, - "jsClassName": "RefdbBackend", - "cppClassName": "RefdbBackend", - "cType": "git_refdb_backend", - "freeFunctionName": "git_refdb_backend_free", - "functions": [] - }, - { - "filename": "reflog.h", - "ignore": true, - "jsClassName": "Reflog", - "cppClassName": "Reflog", - "cType": "git_reflog", - "freeFunctionName": "git_reflog_free", - "functions": [ - { - "cFunctionName": "git_reflog_read", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reflog **", - "cppClassName": "Reflog", - "jsClassName": "Reflog", - "comment": "pointer to reflog" - }, - { - "name": "ref", - "cType": "const git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "reference to read the reflog for" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "read", - "cppFunctionName": "Read", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Read the reflog for the given reference

\n" - }, - { - "cFunctionName": "git_reflog_write", - "args": [ - { - "name": "reflog", - "cType": "git_reflog *", - "cppClassName": "Reflog", - "jsClassName": "Reflog", - "isSelf": true, - "comment": "an existing reflog object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "write", - "cppFunctionName": "Write", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Write an existing in-memory reflog object back to disk\nusing an atomic file lock.

\n" - }, - { - "cFunctionName": "git_reflog_append", - "args": [ - { - "name": "reflog", - "cType": "git_reflog *", - "cppClassName": "Reflog", - "jsClassName": "Reflog", - "isSelf": true, - "comment": "an existing reflog object" - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the OID the reference is now pointing to" - }, - { - "name": "committer", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "the signature of the committer" - }, - { - "name": "msg", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the reflog message" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "append", - "cppFunctionName": "Append", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Add a new entry to the reflog.

\n" - }, - { - "cFunctionName": "git_reflog_rename", - "args": [ - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "the reference" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the new name of the reference" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "rename", - "cppFunctionName": "Rename", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EINVALIDSPEC or an error code", - "jsClassName": "Number" - }, - "description": "

Rename the reflog for the given reference

\n" - }, - { - "cFunctionName": "git_reflog_delete", - "args": [ - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "the reference" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "delete", - "cppFunctionName": "Delete", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Delete the reflog for the given reference

\n" - }, - { - "cFunctionName": "git_reflog_entrycount", - "args": [ - { - "name": "reflog", - "cType": "git_reflog *", - "cppClassName": "Reflog", - "jsClassName": "Reflog", - "isSelf": true, - "comment": "the previously loaded reflog" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "entrycount", - "cppFunctionName": "Entrycount", - "return": { - "cType": "size_t", - "cppClassName": "Uint32", - "comment": "the number of log entries", - "jsClassName": "Number" - }, - "description": "

Get the number of log entries in a reflog

\n" - }, - { - "cFunctionName": "git_reflog_entry_byindex", - "args": [ - { - "name": "reflog", - "cType": "git_reflog *", - "cppClassName": "Reflog", - "jsClassName": "Reflog", - "isSelf": true, - "comment": "a previously loaded reflog" - }, - { - "name": "idx", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the position of the entry to lookup. Should be greater than or equal to 0 (zero) and less than `git_reflog_entrycount()`." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "entryByindex", - "cppFunctionName": "EntryByindex", - "return": { - "cType": "const git_reflog_entry *", - "cppClassName": "ReflogEntry", - "copy": "fixme", - "comment": "the entry; NULL if not found", - "jsClassName": "ReflogEntry" - }, - "description": "

Lookup an entry by its index

\n" - }, - { - "cFunctionName": "git_reflog_drop", - "args": [ - { - "name": "reflog", - "cType": "git_reflog *", - "cppClassName": "Reflog", - "jsClassName": "Reflog", - "isSelf": true, - "comment": "a previously loaded reflog." - }, - { - "name": "idx", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the position of the entry to remove. Should be greater than or equal to 0 (zero) and less than `git_reflog_entrycount()`." - }, - { - "name": "rewrite_previous_entry", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "1 to rewrite the history; 0 otherwise." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "drop", - "cppFunctionName": "Drop", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ENOTFOUND if the entry doesn't exist or an error code.", - "jsClassName": "Number" - }, - "description": "

Remove an entry from the reflog by its index

\n" - }, - { - "cFunctionName": "git_reflog_entry_id_old", - "args": [ - { - "name": "entry", - "cType": "const git_reflog_entry *", - "cppClassName": "ReflogEntry", - "jsClassName": "ReflogEntry", - "comment": "a reflog entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "entryIdOld", - "cppFunctionName": "EntryIdOld", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "the old oid", - "jsClassName": "Oid" - }, - "description": "

Get the old oid

\n" - }, - { - "cFunctionName": "git_reflog_entry_id_new", - "args": [ - { - "name": "entry", - "cType": "const git_reflog_entry *", - "cppClassName": "ReflogEntry", - "jsClassName": "ReflogEntry", - "comment": "a reflog entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "entryIdNew", - "cppFunctionName": "EntryIdNew", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "the new oid at this time", - "jsClassName": "Oid" - }, - "description": "

Get the new oid

\n" - }, - { - "cFunctionName": "git_reflog_entry_committer", - "args": [ - { - "name": "entry", - "cType": "const git_reflog_entry *", - "cppClassName": "ReflogEntry", - "jsClassName": "ReflogEntry", - "comment": "a reflog entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "entryCommitter", - "cppFunctionName": "EntryCommitter", - "return": { - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "copy": "git_signature_dup", - "comment": "the committer", - "jsClassName": "Signature" - }, - "description": "

Get the committer of this entry

\n" - }, - { - "cFunctionName": "git_reflog_entry_message", - "args": [ - { - "name": "entry", - "cType": "const git_reflog_entry *", - "cppClassName": "ReflogEntry", - "jsClassName": "ReflogEntry", - "comment": "a reflog entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "entryMessage", - "cppFunctionName": "EntryMessage", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "the log msg", - "jsClassName": "String" - }, - "description": "

Get the log message

\n" - }, - { - "cFunctionName": "git_reflog_free", - "args": [ - { - "name": "reflog", - "cType": "git_reflog *", - "cppClassName": "Reflog", - "jsClassName": "Reflog", - "comment": "reflog to free" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free the reflog

\n" - } - ] - }, - { - "filename": "reference.h", - "dependencies": [ - "../include/repo.h", - "../include/oid.h", - "../include/object.h" - ], - "jsClassName": "Reference", - "cppClassName": "GitReference", - "cType": "git_reference", - "freeFunctionName": "git_reference_free", - "functions": [ - { - "cFunctionName": "git_reference_name_to_id", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "Pointer to oid to be filled in" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository in which to look up the reference" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The long name for the reference (e.g. HEAD, refs/heads/master, refs/tags/v0.1.0, ...)" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "oidForName", - "cppFunctionName": "OidForName", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, ENOTFOUND, EINVALIDSPEC or an error code.", - "jsClassName": "Number" - }, - "description": "

Lookup a reference by name and resolve immediately to OID.

\n" - }, - { - "cFunctionName": "git_reference_target", - "args": [ - { - "name": "ref", - "cType": "const git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "target", - "cppFunctionName": "Target", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "a pointer to the oid if available, NULL otherwise", - "jsClassName": "Oid" - }, - "description": "

Get the OID pointed to by a direct reference.

\n" - }, - { - "cFunctionName": "git_reference_symbolic_target", - "args": [ - { - "name": "ref", - "cType": "const git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "symbolicTarget", - "cppFunctionName": "SymbolicTarget", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "a pointer to the name if available, NULL otherwise", - "jsClassName": "String" - }, - "description": "

Get full name to the reference pointed to by a symbolic reference.

\n" - }, - { - "cFunctionName": "git_reference_type", - "args": [ - { - "name": "ref", - "cType": "const git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "type", - "cppFunctionName": "Type", - "return": { - "cType": "git_ref_t", - "cppClassName": "Number", - "comment": "the type", - "jsClassName": "Number" - }, - "description": "

Get the type of a reference.

\n" - }, - { - "cFunctionName": "git_reference_name", - "args": [ - { - "name": "ref", - "cType": "const git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "name", - "cppFunctionName": "Name", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "the full name for the ref", - "jsClassName": "String" - }, - "description": "

Get the full name of a reference.

\n" - }, - { - "cFunctionName": "git_reference_resolve", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "Pointer to the peeled reference" - }, - { - "name": "ref", - "cType": "const git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "resolve", - "cppFunctionName": "Resolve", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Resolve a symbolic reference to a direct reference.

\n" - }, - { - "cFunctionName": "git_reference_owner", - "args": [ - { - "name": "ref", - "cType": "const git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference" - } - ], - "ignore": "Never make public for memory allocation reasons", - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "owner", - "cppFunctionName": "owner", - "return": { - "cType": "git_repository *", - "cppClassName": "GitRepo", - "comment": "a pointer to the repo", - "jsClassName": "Repository" - }, - "description": "

Get the repository where a reference resides.

\n" - }, - { - "cFunctionName": "git_reference_symbolic_set_target", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "Pointer to the newly created reference" - }, - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference" - }, - { - "name": "target", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The new target for the reference" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setSymbolicTarget", - "cppFunctionName": "SetSymbolicTarget", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, EINVALIDSPEC or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new reference with the same name as the given reference but a\ndifferent symbolic target. The reference must be a symbolic reference,\notherwise this will fail.

\n" - }, - { - "cFunctionName": "git_reference_set_target", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "Pointer to the newly created reference" - }, - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference" - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "The new target OID for the reference" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setTarget", - "cppFunctionName": "setTarget", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new reference with the same name as the given reference but a\ndifferent OID target. The reference must be a direct reference, otherwise\nthis will fail.

\n" - }, - { - "cFunctionName": "git_reference_rename", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference" - }, - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference to rename" - }, - { - "name": "new_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The new name for the reference" - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Overwrite an existing reference" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "rename", - "cppFunctionName": "Rename", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, EINVALIDSPEC, EEXISTS or an error code", - "jsClassName": "Number" - }, - "description": "

Rename an existing reference.

\n" - }, - { - "cFunctionName": "git_reference_delete", - "args": [ - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference to remove" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "delete", - "cppFunctionName": "Delete", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Delete an existing reference.

\n" - }, - { - "cFunctionName": "git_reference_foreach", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository where to find the refs" - }, - { - "name": "list_flags", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - { - "name": "callback", - "cType": "git_reference_foreach_cb", - "cppClassName": "ReferenceForeachCb", - "jsClassName": "ReferenceForeachCb" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "foreach", - "cppFunctionName": "Foreach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Perform a callback on each reference in the repository.

\n" - }, - { - "cFunctionName": "git_reference_free", - "args": [ - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "git_reference" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "gitReferenceFree", - "cppFunctionName": "GitReferenceFree", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free the given reference.

\n" - }, - { - "cFunctionName": "git_reference_cmp", - "args": [ - { - "name": "ref1", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The first git_reference" - }, - { - "name": "ref2", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "The second git_reference" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "compare", - "cppFunctionName": "Compare", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if the same, else a stable but meaningless ordering.", - "jsClassName": "Number" - }, - "description": "

Compare two references.

\n" - }, - { - "cFunctionName": "git_reference_foreach_glob", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository where to find the refs" - }, - { - "name": "glob", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Pattern to match (fnmatch-style) against reference name." - }, - { - "name": "list_flags", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number" - }, - { - "name": "callback", - "cType": "git_reference_foreach_cb", - "cppClassName": "ReferenceForeachCb", - "jsClassName": "ReferenceForeachCb" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void" - } - ], - "ignore": true, - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "foreachGlob", - "cppFunctionName": "ForeachGlob", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Perform a callback on each reference in the repository whose name\nmatches the given pattern.

\n" - }, - { - "cFunctionName": "git_reference_has_log", - "args": [ - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "A git reference" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "hasLog", - "cppFunctionName": "HasLog", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 when no reflog can be found, 1 when it exists; otherwise an error code.", - "jsClassName": "Number" - }, - "description": "

Check if a reflog exists for the specified reference.

\n" - }, - { - "cFunctionName": "git_reference_is_branch", - "args": [ - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "A git reference" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "isBranch", - "cppFunctionName": "IsBranch", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 when the reference lives in the refs/heads namespace; 0 otherwise.", - "jsClassName": "Number" - }, - "description": "

Check if a reference is a local branch.

\n" - }, - { - "cFunctionName": "git_reference_is_remote", - "args": [ - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "A git reference" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "isRemote", - "cppFunctionName": "IsRemote", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 when the reference lives in the refs/remotes namespace; 0 otherwise.", - "jsClassName": "Number" - }, - "description": "

Check if a reference is a remote tracking branch

\n" - }, - { - "cFunctionName": "git_reference_normalize_name", - "args": [ - { - "name": "buffer_out", - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "isSelf": true, - "comment": "User allocated buffer to store normalized name" - }, - { - "name": "buffer_size", - "cType": "size_t", - "cppClassName": "Integer", - "jsClassName": "Number", - "comment": "Size of buffer_out" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Reference name to be checked." - }, - { - "name": "flags", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Flags to constrain name validation rules - see the GIT_REF_FORMAT constants above." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "normalizeName", - "cppFunctionName": "NormalizeName", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EBUFS if buffer is too small, EINVALIDSPEC or an error code.", - "jsClassName": "Number" - }, - "description": "

Normalize reference name and check validity.

\n" - }, - { - "cFunctionName": "git_reference_peel", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_object **", - "cppClassName": "GitObject", - "jsClassName": "Object", - "comment": "Pointer to the peeled git_object" - }, - { - "name": "ref", - "cType": "git_reference *", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "isSelf": true, - "comment": "The reference to be processed" - }, - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "The type of the requested object (GIT_OBJ_COMMIT, GIT_OBJ_TAG, GIT_OBJ_TREE, GIT_OBJ_BLOB or GIT_OBJ_ANY)." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "peel", - "cppFunctionName": "Peel", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EAMBIGUOUS, GIT_ENOTFOUND or an error code", - "jsClassName": "Number" - }, - "description": "

Recursively peel reference until object of the specified type is found.

\n" - }, - { - "cFunctionName": "git_reference_is_valid_name", - "args": [ - { - "name": "refname", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "name to be checked." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "isValidName", - "cppFunctionName": "IsValidName", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 if the reference name is acceptable; 0 if it isn't", - "jsClassName": "Number" - }, - "description": "

Ensure the reference name is well-formed.

\n" - } - ] - }, - { - "filename": "refspec.h", - "ignore": true, - "jsClassName": "Refspec", - "cppClassName": "Refspec", - "cType": "git_refspec", - "freeFunctionName": "git_refspec_free", - "functions": [ - { - "cFunctionName": "git_refspec_src", - "args": [ - { - "name": "refspec", - "cType": "const git_refspec *", - "cppClassName": "Refspec", - "jsClassName": "Refspec", - "isSelf": true, - "comment": "the refspec" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "src", - "cppFunctionName": "Src", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "the refspec's source specifier", - "jsClassName": "String" - }, - "description": "

Get the source specifier

\n" - }, - { - "cFunctionName": "git_refspec_dst", - "args": [ - { - "name": "refspec", - "cType": "const git_refspec *", - "cppClassName": "Refspec", - "jsClassName": "Refspec", - "isSelf": true, - "comment": "the refspec" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "dst", - "cppFunctionName": "Dst", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "the refspec's destination specifier", - "jsClassName": "String" - }, - "description": "

Get the destination specifier

\n" - }, - { - "cFunctionName": "git_refspec_force", - "args": [ - { - "name": "refspec", - "cType": "const git_refspec *", - "cppClassName": "Refspec", - "jsClassName": "Refspec", - "isSelf": true, - "comment": "the refspec" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "force", - "cppFunctionName": "Force", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 if force update has been set, 0 otherwise", - "jsClassName": "Number" - }, - "description": "

Get the force update setting

\n" - }, - { - "cFunctionName": "git_refspec_src_matches", - "args": [ - { - "name": "refspec", - "cType": "const git_refspec *", - "cppClassName": "Refspec", - "jsClassName": "Refspec", - "isSelf": true, - "comment": "the refspec" - }, - { - "name": "refname", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the name of the reference to check" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "srcMatches", - "cppFunctionName": "SrcMatches", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 if the refspec matches, 0 otherwise", - "jsClassName": "Number" - }, - "description": "

Check if a refspec's source descriptor matches a reference

\n" - }, - { - "cFunctionName": "git_refspec_dst_matches", - "args": [ - { - "name": "refspec", - "cType": "const git_refspec *", - "cppClassName": "Refspec", - "jsClassName": "Refspec", - "isSelf": true, - "comment": "the refspec" - }, - { - "name": "refname", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the name of the reference to check" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "dstMatches", - "cppFunctionName": "DstMatches", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 if the refspec matches, 0 otherwise", - "jsClassName": "Number" - }, - "description": "

Check if a refspec's destination descriptor matches a reference

\n" - }, - { - "cFunctionName": "git_refspec_transform", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "where to store the target name" - }, - { - "name": "outlen", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the size of the `out` buffer" - }, - { - "name": "spec", - "cType": "const git_refspec *", - "cppClassName": "Refspec", - "jsClassName": "Refspec", - "comment": "the refspec" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the name of the reference to transform" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "transform", - "cppFunctionName": "Transform", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, GIT_EBUFS or another error", - "jsClassName": "Number" - }, - "description": "

Transform a reference to its target following the refspec's rules

\n" - }, - { - "cFunctionName": "git_refspec_rtransform", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "where to store the source reference name" - }, - { - "name": "outlen", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the size of the `out` buffer" - }, - { - "name": "spec", - "cType": "const git_refspec *", - "cppClassName": "Refspec", - "jsClassName": "Refspec", - "comment": "the refspec" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the name of the reference to transform" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "rtransform", - "cppFunctionName": "Rtransform", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, GIT_EBUFS or another error", - "jsClassName": "Number" - }, - "description": "

Transform a target reference to its source reference following the refspec's rules

\n" - } - ] - }, - { - "filename": "remote.h", - "dependencies": [ - "git2/net.h" - ], - "jsClassName": "Remote", - "cppClassName": "GitRemote", - "cType": "git_remote", - "freeFunctionName": "git_remote_free", - "functions": [ - { - "cFunctionName": "git_remote_save", - "args": [ - { - "name": "remote", - "cType": "const git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to save to config" - } - ], - "ignore": "called automatically by git_remote_create", - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "save", - "cppFunctionName": "Save", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, GIT_EINVALIDSPEC or an error code", - "jsClassName": "Number" - }, - "description": "

Save a remote to its repository's configuration

\n" - }, - { - "cFunctionName": "git_remote_name", - "args": [ - { - "name": "remote", - "cType": "const git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "name", - "cppFunctionName": "Name", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "a pointer to the name or NULL for in-memory remotes", - "jsClassName": "String" - }, - "description": "

Get the remote's name

\n" - }, - { - "cFunctionName": "git_remote_url", - "args": [ - { - "name": "remote", - "cType": "const git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "url", - "cppFunctionName": "Url", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "a pointer to the url", - "jsClassName": "String" - }, - "description": "

Get the remote's url

\n" - }, - { - "cFunctionName": "git_remote_pushurl", - "args": [ - { - "name": "remote", - "cType": "const git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "pushUrl", - "cppFunctionName": "PushUrl", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "a pointer to the url or NULL if no special url for pushing is set", - "jsClassName": "String" - }, - "description": "

Get the remote's url for pushing

\n" - }, - { - "cFunctionName": "git_remote_set_url", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote" - }, - { - "name": "url", - "cType": "const char*", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the url to set" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setUrl", - "cppFunctionName": "SetUrl", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error value", - "jsClassName": "Number" - }, - "description": "

Set the remote's url

\n" - }, - { - "cFunctionName": "git_remote_set_pushurl", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote" - }, - { - "name": "url", - "cType": "const char*", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the url to set or NULL to clear the pushurl" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setPushUrl", - "cppFunctionName": "SetPushUrl", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error value", - "jsClassName": "Number" - }, - "description": "

Set the remote's url for pushing

\n" - }, - { - "cFunctionName": "git_remote_set_fetchspec", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true - }, - { - "name": "spec", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setFetchspec", - "cppFunctionName": "SetFetchspec", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - } - }, - { - "cFunctionName": "git_remote_fetchspec", - "args": [ - { - "name": "remote", - "cType": "const git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "fetchspec", - "cppFunctionName": "Fetchspec", - "return": { - "cType": "const git_refspec *", - "cppClassName": "Refspec", - "copy": "fixme", - "jsClassName": "Refspec" - } - }, - { - "cFunctionName": "git_remote_set_pushspec", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true - }, - { - "name": "spec", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setPushspec", - "cppFunctionName": "SetPushspec", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - } - }, - { - "cFunctionName": "git_remote_pushspec", - "args": [ - { - "name": "remote", - "cType": "const git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - 
"jsFunctionName": "pushspec", - "cppFunctionName": "Pushspec", - "return": { - "cType": "const git_refspec *", - "cppClassName": "Refspec", - "copy": "fixme", - "jsClassName": "Refspec" - } - }, - { - "cFunctionName": "git_remote_connect", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to connect to" - }, - { - "name": "direction", - "cType": "git_direction", - "cppClassName": "Number", - "jsClassName": "Number", - "comment": "GIT_DIRECTION_FETCH if you want to fetch or GIT_DIRECTION_PUSH if you want to push", - "additionalCast": "(int)" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "connect", - "cppFunctionName": "Connect", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Open a connection to a remote

\n" - }, - { - "cFunctionName": "git_remote_ls", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote" - }, - { - "name": "list_cb", - "cType": "git_headlist_cb", - "cppClassName": "HeadlistCb", - "jsClassName": "HeadlistCb", - "comment": "function to call with each ref discovered at the remote" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "additional data to pass to the callback" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "ls", - "cppFunctionName": "Ls", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Get a list of refs at the remote

\n" - }, - { - "cFunctionName": "git_remote_download", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to download from" - }, - { - "name": "progress_cb", - "cType": "git_transfer_progress_callback", - "cppClassName": "Function", - "jsClassName": "Function", - "isOptional": true, - "comment": "function to call with progress information. Be aware that this is called inline with network and indexing operations, so performance may be affected." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "isPayload": true, - "comment": "payload for the progress callback" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "download", - "cppFunctionName": "Download", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Download the packfile

\n" - }, - { - "cFunctionName": "git_remote_connected", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "connected", - "cppFunctionName": "Connected", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Check whether the remote is connected

\n" - }, - { - "cFunctionName": "git_remote_stop", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "stop", - "cppFunctionName": "Stop", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Cancel the operation

\n" - }, - { - "cFunctionName": "git_remote_disconnect", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to disconnect from" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "disconnect", - "cppFunctionName": "Disconnect", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Disconnect from the remote

\n" - }, - { - "cFunctionName": "git_remote_free", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "comment": "the remote to free" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free the memory associated with a remote

\n" - }, - { - "cFunctionName": "git_remote_update_tips", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to update" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "updateTips", - "cppFunctionName": "UpdateTips", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Update the tips to the new state

\n" - }, - { - "cFunctionName": "git_remote_valid_url", - "args": [ - { - "name": "url", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the url to check" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "validUrl", - "cppFunctionName": "ValidUrl", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Return whether a string is a valid remote URL

\n" - }, - { - "cFunctionName": "git_remote_supported_url", - "args": [ - { - "name": "url", - "cType": "const char*", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the url to check" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "supportedUrl", - "cppFunctionName": "SupportedUrl", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Return whether the passed URL is supported by this version of the library.

\n" - }, - { - "cFunctionName": "git_remote_check_cert", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to configure" - }, - { - "name": "check", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "whether to check the server's certificate (defaults to yes)" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "checkCert", - "cppFunctionName": "CheckCert", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Choose whether to check the server's certificate (applies to HTTPS only)

\n" - }, - { - "cFunctionName": "git_remote_set_cred_acquire_cb", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to configure" - }, - { - "name": "cred_acquire_cb", - "cType": "git_cred_acquire_cb", - "cppClassName": "CredAcquireCb", - "jsClassName": "CredAcquireCb", - "comment": "The credentials acquisition callback to use (defaults to NULL)" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setCredAcquireCb", - "cppFunctionName": "SetCredAcquireCb", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Set a credentials acquisition callback for this remote. If the remote is\nnot available for anonymous access, then you must set this callback in order\nto provide credentials to the transport at the time of authentication\nfailure so that retry can be performed.

\n" - }, - { - "cFunctionName": "git_remote_set_transport", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to configure" - }, - { - "name": "transport", - "cType": "git_transport *", - "cppClassName": "Transport", - "jsClassName": "Transport", - "comment": "the transport object for the remote to use" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setTransport", - "cppFunctionName": "SetTransport", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Sets a custom transport for the remote. The caller can use this function\nto bypass the automatic discovery of a transport by URL scheme (i.e.\nhttp://, https://, git://) and supply their own transport to be used\ninstead. After providing the transport to a remote using this function,\nthe transport object belongs exclusively to that remote, and the remote will\nfree it when it is freed with git_remote_free.

\n" - }, - { - "cFunctionName": "git_remote_set_callbacks", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to configure" - }, - { - "name": "callbacks", - "cType": "git_remote_callbacks *", - "cppClassName": "RemoteCallbacks", - "jsClassName": "RemoteCallbacks", - "comment": "a pointer to the user's callback settings" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setCallbacks", - "cppFunctionName": "SetCallbacks", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Set the callbacks for a remote

\n" - }, - { - "cFunctionName": "git_remote_stats", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "stats", - "cppFunctionName": "Stats", - "return": { - "cType": "const git_transfer_progress *", - "cppClassName": "TransferProgress", - "jsClassName": "TransferProgress" - }, - "description": "

Get the statistics structure that is filled in by the fetch operation.

\n" - }, - { - "cFunctionName": "git_remote_autotag", - "args": [], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "autotag", - "cppFunctionName": "Autotag", - "return": { - "cType": "GIT_EXTERN(", - "cppClassName": "GIT_EXTERN(" - }, - "description": "

Retrieve the tag auto-follow setting

\n" - }, - { - "cFunctionName": "git_remote_set_autotag", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to configure" - }, - { - "name": "value", - "cType": "git_remote_autotag_option_t", - "cppClassName": "RemoteAutotagOptionT", - "jsClassName": "RemoteAutotagOptionT", - "comment": "a GIT_REMOTE_DOWNLOAD_TAGS value" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setAutotag", - "cppFunctionName": "SetAutotag", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Set the tag auto-follow setting

\n" - }, - { - "cFunctionName": "git_remote_rename", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to rename" - }, - { - "name": "new_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the new name the remote should bear" - }, - { - "name": "callback", - "cType": "git_remote_rename_problem_cb", - "cppClassName": "RemoteRenameProblemCb", - "jsClassName": "RemoteRenameProblemCb", - "comment": "Optional callback to notify the consumer of fetch refspecs that haven't been automatically updated and need potential manual tweaking." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Additional data to pass to the callback" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "rename", - "cppFunctionName": "Rename", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Give the remote a new name

\n" - }, - { - "cFunctionName": "git_remote_update_fetchhead", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to query" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "updateFetchhead", - "cppFunctionName": "UpdateFetchhead", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Retrieve the update FETCH_HEAD setting.

\n" - }, - { - "cFunctionName": "git_remote_set_update_fetchhead", - "args": [ - { - "name": "remote", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "isSelf": true, - "comment": "the remote to configure" - }, - { - "name": "value", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "0 to disable updating FETCH_HEAD" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setUpdateFetchhead", - "cppFunctionName": "SetUpdateFetchhead", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Sets the update FETCH_HEAD setting. By default, FETCH_HEAD will be\nupdated on every fetch. Set to 0 to disable.

\n" - }, - { - "cFunctionName": "git_remote_is_valid_name", - "args": [ - { - "name": "remote_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "name to be checked." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "isValidName", - "cppFunctionName": "IsValidName", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Ensure the remote name is well-formed.

\n" - } - ] - }, - { - "filename": "repo.h", - "dependencies": [ - "../include/oid.h", - "../include/commit.h", - "../include/blob.h", - "../include/object.h", - "../include/reference.h", - "../include/submodule.h", - "../include/refdb.h", - "../include/revwalk.h", - "../include/tag.h", - "../include/signature.h", - "../include/tree.h", - "../include/odb.h", - "../include/index.h", - "../include/remote.h", - "../include/clone_options.h", - "node_buffer.h" - ], - "jsClassName": "Repo", - "cppClassName": "GitRepo", - "cType": "git_repository", - "freeFunctionName": "git_repository_free", - "functions": [ - { - "cFunctionName": "git_repository_open", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_repository **", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "pointer to the repo which will be opened" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the path to the repository" - } - ], - "isAsync": true, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "open", - "cppFunctionName": "Open", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Open a git repository.

\n" - }, - { - "cFunctionName": "git_repository_wrap_odb", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_repository **", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "pointer to the repo" - }, - { - "name": "odb", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "the object database to wrap" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "wrapOdb", - "cppFunctionName": "WrapOdb", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a "fake" repository to wrap an object database

\n" - }, - { - "cFunctionName": "git_repository_discover", - "args": [ - { - "name": "path_out", - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The user allocated buffer which will contain the found path." - }, - { - "name": "path_size", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "repository_path size" - }, - { - "name": "start_path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The base path where the lookup starts." - }, - { - "name": "across_fs", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "If true, then the lookup will not stop when a filesystem device change is detected while exploring parent directories." - }, - { - "name": "ceiling_dirs", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "A GIT_PATH_LIST_SEPARATOR separated list of absolute symbolic link free paths. The lookup will stop when any of this paths is reached. Note that the lookup always performs on start_path no matter start_path appears in ceiling_dirs ceiling_dirs might be NULL (which is equivalent to an empty string)" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "discover", - "cppFunctionName": "Discover", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Look for a git repository and copy its path in the given buffer.\nThe lookup start from base_path and walk across parent directories\nif nothing has been found. The lookup ends when the first repository\nis found, or when reaching a directory referenced in ceiling_dirs\nor when the filesystem changes (in case across_fs is true).

\n" - }, - { - "cFunctionName": "git_repository_open_ext", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_repository **", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Pointer to the repo which will be opened. This can actually be NULL if you only want to use the error code to see if a repo at this path could be opened." - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Path to open as git repository. If the flags permit \"searching\", then this can be a path to a subdirectory inside the working directory of the repository." - }, - { - "name": "flags", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "A combination of the GIT_REPOSITORY_OPEN flags above." - }, - { - "name": "ceiling_dirs", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "A GIT_PATH_LIST_SEPARATOR delimited list of path prefixes at which the search for a containing repository should terminate." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "openExt", - "cppFunctionName": "OpenExt", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ENOTFOUND if no repository could be found, or -1 if there was a repository but open failed for some reason (such as repo corruption or system errors).", - "jsClassName": "Number" - }, - "description": "

Find and open a repository with extended controls.

\n" - }, - { - "cFunctionName": "git_repository_free", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "repository handle to close. If NULL nothing occurs." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free a previously allocated repository

\n" - }, - { - "cFunctionName": "git_repository_init", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_repository **", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "pointer to the repo which will be created or reinitialized" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the path to the repository" - }, - { - "name": "is_bare", - "cType": "unsigned", - "cppClassName": "Boolean", - "jsClassName": "Boolean", - "comment": "if true, a Git repository without a working directory is created at the pointed path. If false, provided path will be considered as the working directory into which the .git directory will be created." - } - ], - "isAsync": true, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "init", - "cppFunctionName": "Init", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Creates a new Git repository in the given folder.

\n" - }, - { - "cFunctionName": "git_repository_init_ext", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_repository **", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Pointer to the repo which will be created or reinitialized." - }, - { - "name": "repo_path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The path to the repository." - }, - { - "name": "opts", - "cType": "git_repository_init_options *", - "cppClassName": "RepositoryInitOptions", - "jsClassName": "RepositoryInitOptions", - "comment": "Pointer to git_repository_init_options struct." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "initExt", - "cppFunctionName": "InitExt", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code on failure.", - "jsClassName": "Number" - }, - "description": "

Create a new Git repository in the given folder with extended controls.

\n" - }, - { - "cFunctionName": "git_repository_head", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "pointer to the reference which will be retrieved" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "a repository object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "head", - "cppFunctionName": "Head", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EORPHANEDHEAD when HEAD points to a non existing branch, GIT_ENOTFOUND when HEAD is missing; an error code otherwise", - "jsClassName": "Number" - }, - "description": "

Retrieve and resolve the reference pointed at by HEAD.

\n" - }, - { - "cFunctionName": "git_repository_head_detached", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repo to test" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "headDetached", - "cppFunctionName": "HeadDetached", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 if HEAD is detached, 0 if it's not; error code if there was an error.", - "jsClassName": "Number" - }, - "description": "

Check if a repository's HEAD is detached

\n" - }, - { - "cFunctionName": "git_repository_head_orphan", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repo to test" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "headOrphan", - "cppFunctionName": "HeadOrphan", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 if the current branch is an orphan, 0 if it's not; error code if there was an error", - "jsClassName": "Number" - }, - "description": "

Check if the current branch is an orphan

\n" - }, - { - "cFunctionName": "git_repository_is_empty", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repo to test" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "isEmpty", - "cppFunctionName": "IsEmpty", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 if the repository is empty, 0 if it isn't, error code if the repository is corrupted", - "jsClassName": "Number" - }, - "description": "

Check if a repository is empty

\n" - }, - { - "cFunctionName": "git_repository_path", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "path", - "cppFunctionName": "Path", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "the path to the repository", - "jsClassName": "String" - }, - "description": "

Get the path of this repository

\n" - }, - { - "cFunctionName": "git_repository_workdir", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "workdir", - "cppFunctionName": "Workdir", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "the path to the working dir, if it exists", - "jsClassName": "String" - }, - "description": "

Get the path of the working directory for this repository

\n" - }, - { - "cFunctionName": "git_repository_set_workdir", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - }, - { - "name": "workdir", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The path to a working directory" - }, - { - "name": "update_gitlink", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Create/update gitlink in workdir and set config \"core.worktree\" (if workdir is not the parent of the .git directory)" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setWorkdir", - "cppFunctionName": "SetWorkdir", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, or an error code", - "jsClassName": "Number" - }, - "description": "

Set the path to the working directory for this repository

\n" - }, - { - "cFunctionName": "git_repository_is_bare", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repo to test" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "isBare", - "cppFunctionName": "IsBare", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "1 if the repository is bare, 0 otherwise.", - "jsClassName": "Number" - }, - "description": "

Check if a repository is bare

\n" - }, - { - "cFunctionName": "git_repository_config", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_config **", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "Pointer to store the loaded config file" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "A repository object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "config", - "cppFunctionName": "Config", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, or an error code", - "jsClassName": "Number" - }, - "description": "

Get the configuration file for this repository.

\n" - }, - { - "cFunctionName": "git_repository_set_config", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - }, - { - "name": "config", - "cType": "git_config *", - "cppClassName": "Config", - "jsClassName": "Config", - "comment": "A Config object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setConfig", - "cppFunctionName": "SetConfig", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Set the configuration file for this repository

\n" - }, - { - "cFunctionName": "git_repository_odb", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_odb **", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "Pointer to store the loaded ODB" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "odb", - "cppFunctionName": "Odb", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, or an error code", - "jsClassName": "Number" - }, - "description": "

Get the Object Database for this repository.

\n" - }, - { - "cFunctionName": "git_repository_set_odb", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - }, - { - "name": "odb", - "cType": "git_odb *", - "cppClassName": "GitOdb", - "jsClassName": "Odb", - "comment": "An ODB object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setOdb", - "cppFunctionName": "SetOdb", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Set the Object Database for this repository

\n" - }, - { - "cFunctionName": "git_repository_refdb", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_refdb **", - "cppClassName": "GitRefDb", - "jsClassName": "RefDb", - "comment": "Pointer to store the loaded refdb" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "A repository object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "refdb", - "cppFunctionName": "Refdb", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, or an error code", - "jsClassName": "Number" - }, - "description": "

Get the Reference Database Backend for this repository.

\n" - }, - { - "cFunctionName": "git_repository_set_refdb", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - }, - { - "name": "refdb", - "cType": "git_refdb *", - "cppClassName": "GitRefDb", - "jsClassName": "RefDb", - "comment": "An refdb object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setRefdb", - "cppFunctionName": "SetRefdb", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Set the Reference Database Backend for this repository

\n" - }, - { - "cFunctionName": "git_repository_index", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_index **", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "comment": "Pointer to store the loaded index" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "openIndex", - "cppFunctionName": "openIndex", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, or an error code", - "jsClassName": "Number" - }, - "description": "

Get the Index file for this repository.

\n" - }, - { - "cFunctionName": "git_repository_set_index", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - }, - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "comment": "An index object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setIndex", - "cppFunctionName": "SetIndex", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Set the index file for this repository

\n" - }, - { - "cFunctionName": "git_repository_message", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Buffer to write data into or NULL to just read required size" - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Length of `out` buffer in bytes" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository to read prepared message from" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "message", - "cppFunctionName": "Message", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "GIT_ENOUTFOUND if no message exists, other value < 0 for other errors, or total bytes in message (may be > `len`) on success", - "jsClassName": "Number" - }, - "description": "

Retrieve git's prepared message

\n" - }, - { - "cFunctionName": "git_repository_message_remove", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "messageRemove", - "cppFunctionName": "MessageRemove", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Remove git's prepared message.

\n" - }, - { - "cFunctionName": "git_repository_merge_cleanup", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "mergeCleanup", - "cppFunctionName": "MergeCleanup", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, or error", - "jsClassName": "Number" - }, - "description": "

Remove all the metadata associated with an ongoing git merge, including\nMERGE_HEAD, MERGE_MSG, etc.

\n" - }, - { - "cFunctionName": "git_repository_fetchhead_foreach", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - }, - { - "name": "callback", - "cType": "git_repository_fetchhead_foreach_cb", - "cppClassName": "RepositoryFetchheadForeachCb", - "jsClassName": "RepositoryFetchheadForeachCb", - "comment": "Callback function" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Pointer to callback data (optional)" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "fetchheadForeach", - "cppFunctionName": "FetchheadForeach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ENOTFOUND, GIT_EUSER or error", - "jsClassName": "Number" - }, - "description": "

Call callback 'callback' for each entry in the given FETCH_HEAD file.

\n" - }, - { - "cFunctionName": "git_repository_mergehead_foreach", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "A repository object" - }, - { - "name": "callback", - "cType": "git_repository_mergehead_foreach_cb", - "cppClassName": "RepositoryMergeheadForeachCb", - "jsClassName": "RepositoryMergeheadForeachCb", - "comment": "Callback function" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Pointer to callback data (optional)" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "mergeheadForeach", - "cppFunctionName": "MergeheadForeach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ENOTFOUND, GIT_EUSER or error", - "jsClassName": "Number" - }, - "description": "

If a merge is in progress, call callback 'cb' for each commit ID in the\nMERGE_HEAD file.

\n" - }, - { - "cFunctionName": "git_repository_hashfile", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "Output value of calculated SHA" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository pointer" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Path to file on disk whose contents should be hashed. If the repository is not NULL, this can be a relative path." - }, - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Int32", - "jsClassName": "Otype", - "comment": "The object type to hash as (e.g. GIT_OBJ_BLOB)" - }, - { - "name": "as_path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The path to use to look up filtering rules. If this is NULL, then the `path` parameter will be used instead. If this is passed as the empty string, then no filters will be applied when calculating the hash." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "hashfile", - "cppFunctionName": "Hashfile", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Calculate hash of file using repository filtering rules.

\n" - }, - { - "cFunctionName": "git_repository_set_head", - "args": [ - { - "name": "repo", - "cType": "git_repository*", - "cppClassName": "Repository*", - "jsClassName": "Repository*", - "comment": "Repository pointer" - }, - { - "name": "refname", - "cType": "const char*", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Canonical name of the reference the HEAD should point at" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "setHead", - "cppFunctionName": "SetHead", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, or an error code", - "jsClassName": "Number" - }, - "description": "

Make the repository HEAD point to the specified reference.

\n" - }, - { - "cFunctionName": "git_repository_set_head_detached", - "args": [ - { - "name": "repo", - "cType": "git_repository*", - "cppClassName": "Repository*", - "jsClassName": "Repository*", - "comment": "Repository pointer" - }, - { - "name": "commitish", - "cType": "const git_oid*", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Object id of the Commit the HEAD should point to" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "setHeadDetached", - "cppFunctionName": "SetHeadDetached", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, or an error code", - "jsClassName": "Number" - }, - "description": "

Make the repository HEAD directly point to the Commit.

\n" - }, - { - "cFunctionName": "git_repository_detach_head", - "args": [ - { - "name": "repo", - "cType": "git_repository*", - "cppClassName": "Repository*", - "jsClassName": "Repository*", - "comment": "Repository pointer" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "detachHead", - "cppFunctionName": "DetachHead", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EORPHANEDHEAD when HEAD points to a non existing branch or an error code", - "jsClassName": "Number" - }, - "description": "

Detach the HEAD.

\n" - }, - { - "cFunctionName": "git_repository_state", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repository pointer" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "state", - "cppFunctionName": "State", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "The state of the repository", - "jsClassName": "Number" - }, - "description": "

Determines the status of a git repository - ie, whether an operation\n(merge, cherry-pick, etc) is in progress.

\n" - }, - { - "cFunctionName": "git_blob_lookup", - "args": [ - { - "name": "blob", - "cType": "git_blob **", - "cppClassName": "GitBlob", - "jsClassName": "Blob", - "isReturn": true, - "comment": "pointer to the looked up blob" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repo to use when locating the blob." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "identity of the blob to locate." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getBlob", - "cppFunctionName": "GetBlob", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Lookup a blob object from a repository.

\n" - }, - { - "cFunctionName": "git_blob_lookup_prefix", - "args": [ - { - "name": "blob", - "cType": "git_blob **", - "cppClassName": "GitBlob", - "jsClassName": "Blob", - "isReturn": true, - "comment": "pointer to the looked up blob" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repo to use when locating the blob." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "identity of the blob to locate." - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the length of the short identifier" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getBlobByPrefix", - "cppFunctionName": "GetBlobByPrefix", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Lookup a blob object from a repository,\ngiven a prefix of its identifier (short id).

\n" - }, - { - "cFunctionName": "git_commit_lookup", - "args": [ - { - "name": "commit", - "cType": "git_commit **", - "cppClassName": "GitCommit", - "jsClassName": "Commit", - "isReturn": true, - "comment": "pointer to the looked up commit" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repo to use when locating the commit." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "identity of the commit to locate. If the object is an annotated tag it will be peeled back to the commit." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getCommit", - "cppFunctionName": "GetCommit", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Lookup a commit object from a repository.

\n" - }, - { - "cFunctionName": "git_commit_create", - "args": [ - { - "name": "id", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isReturn": true, - "shouldAlloc": true, - "comment": "Pointer in which to store the OID of the newly created commit" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repository where to store the commit" - }, - { - "name": "update_ref", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "isOptional": true, - "comment": "If not NULL, name of the reference that will be updated to point to this commit. If the reference is not direct, it will be resolved to a direct reference. Use \"HEAD\" to update the HEAD of the current branch and make it point to this commit. If the reference doesn't exist yet, it will be created." - }, - { - "name": "author", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "Signature with author and author time of commit" - }, - { - "name": "committer", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "Signature with committer and * commit time of commit" - }, - { - "name": "message_encoding", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "isOptional": true, - "comment": "The encoding for the message in the commit, represented with a standard encoding name. E.g. \"UTF-8\". If NULL, no encoding header is written and UTF-8 is assumed." - }, - { - "name": "message", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "_encoding The encoding for the message in the commit, represented with a standard encoding name. E.g. \"UTF-8\". If NULL, no encoding header is written and UTF-8 is assumed." 
- }, - { - "name": "tree", - "cType": "const git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "comment": "An instance of a `git_tree` object that will be used as the tree for the commit. This tree object must also be owned by the given `repo`." - }, - { - "name": "parent_count", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Number of parents for this commit" - }, - { - "name": "parents", - "cType": "const git_commit **", - "cppClassName": "Array", - "arrayElementCppClassName": "GitCommit", - "jsClassName": "Array" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "createCommit", - "cppFunctionName": "CreateCommit", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code The created commit will be written to the Object Database and the given reference will be updated to point to it", - "jsClassName": "Number" - }, - "description": "

Create new commit in the repository from a list of git_object pointers

\n" - }, - { - "cFunctionName": "git_object_lookup", - "args": [ - { - "name": "object", - "cType": "git_object **", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isReturn": true, - "comment": "pointer to the looked-up object" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repository to look up the object" - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the unique identifier for the object" - }, - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "the type of the object" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getObject", - "cppFunctionName": "GetObject", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "a reference to the object", - "jsClassName": "Number" - }, - "description": "

Lookup a reference to one of the objects in a repository.

\n" - }, - { - "cFunctionName": "git_object_lookup_prefix", - "args": [ - { - "name": "object_out", - "cType": "git_object **", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isReturn": true, - "comment": "pointer where to store the looked-up object" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repository to look up the object" - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "a short identifier for the object" - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the length of the short identifier" - }, - { - "name": "type", - "cType": "git_otype", - "cppClassName": "Number", - "jsClassName": "Number", - "comment": "the type of the object" - } - ], - "ignore": true, - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getObjectByPrefix", - "cppFunctionName": "GetObjectByPrefix", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Lookup a reference to one of the objects in a repository,\ngiven a prefix of its identifier (short id).

\n" - }, - { - "cFunctionName": "git_refdb_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_refdb **", - "cppClassName": "GitRefDb", - "jsClassName": "RefDb", - "comment": "location to store the database pointer, if opened. Set to NULL if the open failed." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repository" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "newRefDbWithoutBackends", - "cppFunctionName": "NewRefDbWithoutBackends", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new reference database with no backends.

\n" - }, - { - "cFunctionName": "git_refdb_open", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_refdb **", - "cppClassName": "GitRefDb", - "jsClassName": "RefDb", - "comment": "location to store the database pointer, if opened. Set to NULL if the open failed." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repository" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "openRefDb", - "cppFunctionName": "OpenRefDb", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new reference database and automatically add\nthe default backends:

\n" - }, - { - "cFunctionName": "git_refdb_backend_fs", - "args": [ - { - "name": "backend_out", - "cType": "struct git_refdb_backend **", - "cppClassName": "RefdbBackend", - "jsClassName": "RefdbBackend", - "isReturn": true, - "comment": "Output pointer to the git_refdb_backend object" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Git repository to access" - }, - { - "name": "refdb", - "cType": "git_refdb *", - "cppClassName": "GitRefDb", - "jsClassName": "RefDb" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "openRefDbBackend", - "cppFunctionName": "OpenRefDbBackend", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 error code on failure", - "jsClassName": "Number" - }, - "description": "

Constructors for default filesystem-based refdb backend

\n" - }, - { - "cFunctionName": "git_reference_lookup", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "pointer to the looked-up reference" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repository to look up the reference" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the long name for the reference (e.g. HEAD, refs/heads/master, refs/tags/v0.1.0, ...)" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getReference", - "cppFunctionName": "GetReference", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, ENOTFOUND, EINVALIDSPEC or an error code.", - "jsClassName": "Number" - }, - "description": "

Lookup a reference by name in a repository.

\n" - }, - { - "cFunctionName": "git_reference_symbolic_create", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "Pointer to the newly created reference" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repository where that reference will live" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The name of the reference" - }, - { - "name": "target", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The target of the reference" - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Overwrite existing references" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "createSymbolicReference", - "cppFunctionName": "CreateSymbolicReference", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, EEXISTS, EINVALIDSPEC or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new symbolic reference.

\n" - }, - { - "cFunctionName": "git_reference_create", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_reference **", - "cppClassName": "GitReference", - "jsClassName": "Reference", - "comment": "Pointer to the newly created reference" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repository where that reference will live" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The name of the reference" - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "The object id pointed to by the reference." - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Overwrite existing references" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "createReference", - "cppFunctionName": "CreateReference", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, EEXISTS, EINVALIDSPEC or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new direct reference.

\n" - }, - { - "cFunctionName": "git_remote_create", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_remote **", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "comment": "the resulting remote" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repository in which to create the remote" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the remote's name" - }, - { - "name": "url", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the remote's url" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addRemote", - "cppFunctionName": "AddRemote", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, GIT_EINVALIDSPEC, GIT_EEXISTS or an error code", - "jsClassName": "Number" - }, - "description": "

Add a remote with the default fetch refspec to the repository's configuration. This\ncalls git_remote_save before returning.

\n" - }, - { - "cFunctionName": "git_remote_create_inmemory", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_remote **", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "comment": "pointer to the new remote object" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the associated repository" - }, - { - "name": "fetch", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the fetch refspec to use for this remote. May be NULL for defaults." - }, - { - "name": "url", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the remote repository's URL" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addRemoteInMemory", - "cppFunctionName": "AddRemoteInMemory", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a remote in memory

\n" - }, - { - "cFunctionName": "git_revwalk_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_revwalk **", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "comment": "pointer to the new revision walker" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repo to walk through" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "createRevWalk", - "cppFunctionName": "CreateRevWalk", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Allocate a new revision walker to iterate through a repo.

\n" - }, - { - "cFunctionName": "git_submodule_lookup", - "args": [ - { - "name": "submodule", - "cType": "git_submodule **", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isReturn": true, - "comment": "Pointer to submodule description object pointer.." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "The repository." - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The name of the submodule. Trailing slashes will be ignored." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getSubmodule", - "cppFunctionName": "GetSubmodule", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ENOTFOUND if submodule does not exist, GIT_EEXISTS if submodule exists in working directory only, -1 on other errors.", - "jsClassName": "Number" - }, - "description": "

Lookup submodule information by name or path.

\n" - }, - { - "cFunctionName": "git_submodule_add_setup", - "args": [ - { - "name": "submodule", - "cType": "git_submodule **", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isReturn": true, - "comment": "The newly created submodule ready to open for clone" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Superproject repository to contain the new submodule" - }, - { - "name": "url", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "URL for the submodules remote" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Path at which the submodule should be created" - }, - { - "name": "use_gitlink", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Should workdir contain a gitlink to the repo in .git/modules vs. repo directly in workdir." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addSubmodule", - "cppFunctionName": "AddSubmodule", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EEXISTS if submodule already exists, -1 on other errors.", - "jsClassName": "Number" - }, - "description": "

Set up a new git submodule for checkout.

\n" - }, - { - "cFunctionName": "git_tag_lookup", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_tag **", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "comment": "pointer to the looked up tag" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repo to use when locating the tag." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "identity of the tag to locate." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getTag", - "cppFunctionName": "GetTag", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Lookup a tag object from the repository.

\n" - }, - { - "cFunctionName": "git_tag_lookup_prefix", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_tag **", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "comment": "pointer to the looked up tag" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repo to use when locating the tag." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "identity of the tag to locate." - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the length of the short identifier" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getTagByPrefix", - "cppFunctionName": "GetTagByPrefix", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Lookup a tag object from the repository,\ngiven a prefix of its identifier (short id).

\n" - }, - { - "cFunctionName": "git_tag_create", - "args": [ - { - "name": "oid", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isReturn": true, - "shouldAlloc": true, - "comment": "Pointer where to store the OID of the newly created tag. If the tag already exists, this parameter will be the oid of the existing tag, and the function will return a GIT_EEXISTS error code." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repository where to store the tag" - }, - { - "name": "tag_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Name for the tag; this name is validated for consistency. It should also not conflict with an already existing tag name" - }, - { - "name": "target", - "cType": "const git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "comment": "Object to which this tag points. This object must belong to the given `repo`." 
- }, - { - "name": "tagger", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "Signature of the tagger for this tag, and of the tagging time" - }, - { - "name": "message", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Full message for this tag" - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Overwrite existing references" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "createTag", - "cppFunctionName": "CreateTag", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EINVALIDSPEC or an error code A tag object is written to the ODB, and a proper reference is written in the /refs/tags folder, pointing to it", - "jsClassName": "Number" - }, - "description": "

Create a new tag in the repository from an object

\n" - }, - { - "cFunctionName": "git_tag_create_lightweight", - "args": [ - { - "name": "oid", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "isReturn": true, - "shouldAlloc": true, - "comment": "Pointer where to store the OID of the provided target object. If the tag already exists, this parameter will be filled with the oid of the existing pointed object and the function will return a GIT_EEXISTS error code." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repository where to store the lightweight tag" - }, - { - "name": "tag_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Name for the tag; this name is validated for consistency. It should also not conflict with an already existing tag name" - }, - { - "name": "target", - "cType": "const git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "comment": "Object to which this tag points. This object must belong to the given `repo`." - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Overwrite existing references" - } - ], - "isAsync": true, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "createLightweightTag", - "cppFunctionName": "CreateLightweightTag", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EINVALIDSPEC or an error code A proper reference is written in the /refs/tags folder, pointing to the provided target object", - "jsClassName": "Number" - }, - "description": "

Create a new lightweight tag pointing at a target object

\n" - }, - { - "cFunctionName": "git_tree_lookup", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_tree **", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "comment": "Pointer to the looked up tree" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "The repo to use when locating the tree." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "Identity of the tree to locate." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getTree", - "cppFunctionName": "GetTree", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Lookup a tree object from the repository.

\n" - }, - { - "cFunctionName": "git_tree_lookup_prefix", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_tree **", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "comment": "pointer to the looked up tree" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repo to use when locating the tree." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "identity of the tree to locate." - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the length of the short identifier" - } - ], - "ignore": true, - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getTreeByPrefix", - "cppFunctionName": "GetTreeByPrefix", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Lookup a tree object from the repository,\ngiven a prefix of its identifier (short id).

\n" - }, - { - "cFunctionName": "git_submodule_reload_all", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "reloadSubmodules", - "cppFunctionName": "ReloadSubmodules", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Reread all submodule info.

\n" - }, - { - "cFunctionName": "git_tag_delete", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repository where lives the tag" - }, - { - "name": "tag_name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Name of the tag to be deleted; this name is validated for consistency." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "delete", - "cppFunctionName": "Delete", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EINVALIDSPEC or an error code", - "jsClassName": "Number" - }, - "description": "

Delete an existing tag reference.

\n" - }, - { - "cFunctionName": "git_tag_list", - "args": [ - { - "name": "tag_names", - "cType": "git_strarray *", - "cppClassName": "Array", - "jsClassName": "Array", - "isReturn": true, - "comment": "Pointer to a git_strarray structure where the tag names will be stored" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repository where to find the tags" - } - ], - "ignore": true, - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "listTags", - "cppFunctionName": "ListTags", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Fill a list with all the tags in the Repository

\n" - }, - { - "cFunctionName": "git_tag_list_match", - "args": [ - { - "name": "tag_names", - "cType": "git_strarray *", - "cppClassName": "Strarray", - "jsClassName": "Strarray", - "isReturn": true, - "comment": "Pointer to a git_strarray structure where the tag names will be stored" - }, - { - "name": "pattern", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Standard fnmatch pattern" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repository where to find the tags" - } - ], - "ignore": true, - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "listMatch", - "cppFunctionName": "ListMatch", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Fill a list with all the tags in the Repository\nwhich name match a defined pattern

\n" - }, - { - "cFunctionName": "git_tag_foreach", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository" - }, - { - "name": "callback", - "cType": "git_tag_foreach_cb", - "cppClassName": "TagForeachCb", - "jsClassName": "TagForeachCb", - "comment": "Callback function" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Pointer to callback data (optional)" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "foreach", - "cppFunctionName": "Foreach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Call callback `cb' for each tag in the repository

\n" - }, - { - "cFunctionName": "git_reference_list", - "args": [ - { - "name": "array", - "cType": "git_strarray *", - "cppClassName": "Array", - "jsClassName": "Array", - "freeFunctionName": "git_strarray_free", - "size": "count", - "key": "strings", - "shouldAlloc": true, - "isReturn": true, - "comment": "Pointer to a git_strarray structure where the reference names will be stored" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "Repository where to find the refs" - }, - { - "name": "list_flags", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "isOptional": true - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getReferences", - "cppFunctionName": "GetReferences", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Fill a list with all the references that can be found in a repository.

\n" - }, - { - "cFunctionName": "git_blob_create_frombuffer", - "args": [ - { - "name": "oid", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "isReturn": true, - "comment": "return the oid of the written blob" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "repository where to blob will be written" - }, - { - "name": "buffer", - "cType": "const void *", - "cppClassName": "Buffer", - "jsClassName": "Buffer", - "comment": "data to be written into the blob" - }, - { - "name": "len", - "cType": "size_t", - "cppClassName": "Number", - "jsClassName": "Number", - "comment": "length of the data" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "createBlobFromBuffer", - "cppFunctionName": "CreateBlobFromBuffer", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Write an in-memory buffer to the ODB as a blob

\n" - }, - { - "cFunctionName": "git_blob_create_fromworkdir", - "args": [ - { - "name": "id", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "isReturn": true, - "comment": "return the id of the written blob" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "repository where the blob will be written. this repository cannot be bare" - }, - { - "name": "relative_path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "file from which the blob will be created, relative to the repository's working dir" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "createBlobFromWorkdir", - "cppFunctionName": "CreateBlobFromWorkdir", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Read a file from the working folder of a repository\nand write it to the Object Database as a loose blob

\n" - }, - { - "cFunctionName": "git_blob_create_fromdisk", - "args": [ - { - "name": "id", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "isReturn": true, - "comment": "return the id of the written blob" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "repository where the blob will be written. this repository can be bare or not" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "file from which the blob will be created" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "createBlobFromFile", - "cppFunctionName": "CreateBlobFromFile", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Read a file from the filesystem and write its content\nto the Object Database as a loose blob

\n" - }, - { - "cFunctionName": "git_blob_create_fromchunks", - "args": [ - { - "name": "id", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "isReturn": true, - "comment": "Return the id of the written blob" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "repository where the blob will be written. This repository can be bare or not." - }, - { - "name": "hintpath", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "if not NULL, will help selecting the filters to apply onto the content of the blob to be created." - }, - { - "name": "callback", - "cType": "git_blob_chunk_cb", - "cppClassName": "BlobChunkCb", - "jsClassName": "BlobChunkCb" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "createBlobFromChunks", - "cppFunctionName": "CreateBlobFromChunks", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Write a loose blob to the Object Database from a\nprovider of chunks of data.

\n" - }, - { - "cFunctionName": "git_remote_list", - "args": [ - { - "name": "out", - "cType": "git_strarray *", - "cppClassName": "Array", - "jsClassName": "Array", - "freeFunctionName": "git_strarray_free", - "size": "count", - "key": "strings", - "shouldAlloc": true, - "isReturn": true, - "comment": "a string array which receives the names of the remotes" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the repository to query" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getRemotes", - "cppFunctionName": "GetRemotes", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Get a list of the configured remotes for a repo

\n" - }, - { - "cFunctionName": "git_clone", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_repository **", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "pointer that will receive the resulting repository object" - }, - { - "name": "url", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the remote repository to clone" - }, - { - "name": "local_path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "local directory to clone to" - }, - { - "name": "options", - "cType": "const git_clone_options *", - "cppClassName": "GitCloneOptions", - "jsClassName": "CloneOptions", - "isOptional": true, - "comment": "configuration options for the clone. If NULL, the function works as though GIT_OPTIONS_INIT were passed." - } - ], - "isAsync": true, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "clone", - "cppFunctionName": "Clone", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ERROR otherwise (use giterr_last for information about the error)", - "jsClassName": "Number" - }, - "description": "

Clone a remote repository, and checkout the branch pointed to by the remote\nHEAD.

\n" - }, - { - "cFunctionName": "git_remote_load", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_remote **", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "comment": "pointer to the new remote object" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isSelf": true, - "comment": "the associated repository" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the remote's name" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getRemote", - "cppFunctionName": "GetRemote", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0, GIT_ENOTFOUND, GIT_EINVALIDSPEC or an error code", - "jsClassName": "Number" - }, - "description": "

Get the information for a particular remote

\n" - } - ] - }, - { - "filename": "reset.h", - "ignore": true, - "jsClassName": "Reset", - "cppClassName": "Reset", - "cType": "git_reset", - "freeFunctionName": "git_reset_free", - "functions": [ - { - "cFunctionName": "git_reset", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository where to perform the reset operation." - }, - { - "name": "target", - "cType": "git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "comment": "Committish to which the Head should be moved to. This object must belong to the given `repo` and can either be a git_commit or a git_tag. When a git_tag is being passed, it should be dereferencable to a git_commit which oid will be used as the target of the branch." - }, - { - "name": "reset_type", - "cType": "git_reset_t", - "cppClassName": "ResetT", - "jsClassName": "ResetT", - "comment": "Kind of reset operation to perform." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitReset", - "cppFunctionName": "GitReset", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success or an error code", - "jsClassName": "Number" - }, - "description": "

Sets the current head to the specified commit oid and optionally\nresets the index and working tree to match.

\n" - }, - { - "cFunctionName": "git_reset_default", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository where to perform the reset operation." - }, - { - "name": "target", - "cType": "git_object *", - "cppClassName": "GitObject", - "jsClassName": "Object", - "comment": "The committish which content will be used to reset the content of the index." - }, - { - "name": "pathspecs", - "cType": "git_strarray*", - "cppClassName": "Strarray*", - "jsClassName": "Strarray*", - "comment": "List of pathspecs to operate on." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "default", - "cppFunctionName": "Default", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success or an error code < 0", - "jsClassName": "Number" - }, - "description": "

Updates some entries in the index from the target commit tree.

\n" - } - ] - }, - { - "filename": "revparse.h", - "ignore": true, - "jsClassName": "Revparse", - "cppClassName": "Revparse", - "cType": "git_revparse", - "freeFunctionName": "git_revparse_free", - "functions": [ - { - "cFunctionName": "git_revparse_single", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_object **", - "cppClassName": "GitObject", - "jsClassName": "Object", - "comment": "pointer to output object" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "the repository to search in" - }, - { - "name": "spec", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the textual specification for an object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "single", - "cppFunctionName": "Single", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ENOTFOUND, GIT_EAMBIGUOUS, GIT_EINVALIDSPEC or an error code", - "jsClassName": "Number" - }, - "description": "

Find a single object, as specified by a revision string. See man gitrevisions,\nor http://git-scm.com/docs/git-rev-parse.html#_specifying_revisions for\ninformation on the syntax accepted.

\n" - }, - { - "cFunctionName": "git_revparse", - "args": [ - { - "name": "revspec", - "cType": "git_revspec *", - "cppClassName": "Revspec", - "jsClassName": "Revspec", - "comment": "Pointer to an user-allocated git_revspec struct where the result of the rev-parse will be stored" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "the repository to search in" - }, - { - "name": "spec", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the rev-parse spec to parse" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitRevparse", - "cppFunctionName": "GitRevparse", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_INVALIDSPEC, GIT_ENOTFOUND, GIT_EAMBIGUOUS or an error code", - "jsClassName": "Number" - }, - "description": "

Parse a revision string for from, to, and intent. See man gitrevisions or\nhttp://git-scm.com/docs/git-rev-parse.html#_specifying_revisions for information\non the syntax accepted.

\n" - } - ] - }, - { - "filename": "revwalk.h", - "dependencies": [ - "../include/oid.h", - "../include/repo.h" - ], - "jsClassName": "RevWalk", - "cppClassName": "GitRevWalk", - "cType": "git_revwalk", - "freeFunctionName": "git_revwalk_free", - "functions": [ - { - "cFunctionName": "git_revwalk_reset", - "args": [ - { - "name": "walker", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "handle to reset." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "reset", - "cppFunctionName": "Reset", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Reset the revision walker for reuse.

\n" - }, - { - "cFunctionName": "git_revwalk_push", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker being used for the traversal." - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the oid of the commit to start from." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "push", - "cppFunctionName": "Push", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Mark a commit to start traversal from.

\n" - }, - { - "cFunctionName": "git_revwalk_push_glob", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker being used for the traversal" - }, - { - "name": "glob", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the glob pattern references should match" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "pushGlob", - "cppFunctionName": "PushGlob", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Push matching references

\n" - }, - { - "cFunctionName": "git_revwalk_push_head", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker being used for the traversal" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "pushHead", - "cppFunctionName": "PushHead", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Push the repository's HEAD

\n" - }, - { - "cFunctionName": "git_revwalk_hide", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker being used for the traversal." - }, - { - "name": "commit_id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the oid of commit that will be ignored during the traversal" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "hide", - "cppFunctionName": "Hide", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Mark a commit (and its ancestors) uninteresting for the output.

\n" - }, - { - "cFunctionName": "git_revwalk_hide_glob", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker being used for the traversal" - }, - { - "name": "glob", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the glob pattern references should match" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "hideGlob", - "cppFunctionName": "HideGlob", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Hide matching references.

\n" - }, - { - "cFunctionName": "git_revwalk_hide_head", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker being used for the traversal" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "hideHead", - "cppFunctionName": "HideHead", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Hide the repository's HEAD

\n" - }, - { - "cFunctionName": "git_revwalk_push_ref", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker being used for the traversal" - }, - { - "name": "refname", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the reference to push" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "pushRef", - "cppFunctionName": "PushRef", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Push the OID pointed to by a reference

\n" - }, - { - "cFunctionName": "git_revwalk_hide_ref", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker being used for the traversal" - }, - { - "name": "refname", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the reference to hide" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "hideRef", - "cppFunctionName": "HideRef", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Hide the OID pointed to by a reference

\n" - }, - { - "cFunctionName": "git_revwalk_next", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "shouldAlloc": true, - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "Pointer where to store the oid of the next commit" - }, - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker to pop the commit from." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "next", - "cppFunctionName": "Next", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if the next commit was found; GIT_ITEROVER if there are no commits left to iterate", - "jsClassName": "Number" - }, - "description": "

Get the next commit from the revision walk.

\n" - }, - { - "cFunctionName": "git_revwalk_sorting", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker being used for the traversal." - }, - { - "name": "sort_mode", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "combination of GIT_SORT_XXX flags" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "sorting", - "cppFunctionName": "Sorting", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Change the sorting mode when iterating through the\nrepository's contents.

\n" - }, - { - "cFunctionName": "git_revwalk_push_range", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the walker being used for the traversal" - }, - { - "name": "range", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the range" - } - ], - "ignore": "This is in the documentation, but doesn't seem to exist!", - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "pushRange", - "cppFunctionName": "PushRange", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Push and hide the respective endpoints of the given range.

\n" - }, - { - "cFunctionName": "git_revwalk_free", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "comment": "traversal handle to close. If NULL nothing occurs." - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free a revision walker previously allocated.

\n" - }, - { - "cFunctionName": "git_revwalk_repository", - "args": [ - { - "name": "walk", - "cType": "git_revwalk *", - "cppClassName": "GitRevWalk", - "jsClassName": "RevWalk", - "isSelf": true, - "comment": "the revision walker" - } - ], - "ignore": "Never make public for memory allocation reasons", - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "repository", - "cppFunctionName": "Repository", - "return": { - "cType": "git_repository *", - "cppClassName": "GitRepo", - "comment": "the repository being walked", - "jsClassName": "Repository" - }, - "description": "

Return the repository on which this walker\nis operating.

\n" - } - ] - }, - { - "filename": "time.h", - "dependencies": [], - "jsClassName": "Time", - "cppClassName": "GitTime", - "cType": "git_time", - "freeFunctionName": "free", - "fields": [ - { - "jsFunctionName": "time", - "cppFunctionName": "Time", - "name": "time", - "cType": "git_time_t", - "cppClassName": "Integer", - "jsClassName": "Number" - }, - { - "jsFunctionName": "offset", - "cppFunctionName": "Offset", - "name": "offset", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number" - } - ] - }, - { - "filename": "signature.h", - "dependencies": [ - "../include/time.h" - ], - "jsClassName": "Signature", - "cppClassName": "GitSignature", - "cType": "git_signature", - "freeFunctionName": "git_signature_free", - "fields": [ - { - "jsFunctionName": "name", - "cppFunctionName": "Name", - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - }, - { - "jsFunctionName": "email", - "cppFunctionName": "Email", - "name": "email", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String" - }, - { - "jsFunctionName": "time", - "cppFunctionName": "Time", - "name": "when", - "cType": "git_time", - "cppClassName": "GitTime", - "jsClassName": "Time", - "copy": "git_time_dup" - } - ], - "functions": [ - { - "cFunctionName": "git_signature_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_signature **", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "new signature, in case of error NULL" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "name of the person" - }, - { - "name": "email", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "email of the person" - }, - { - "name": "time", - "cType": "git_time_t", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "time when the action happened" - }, - { - "name": 
"offset", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "timezone offset in minutes for the time" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "create", - "cppFunctionName": "Create", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new action signature.

\n" - }, - { - "cFunctionName": "git_signature_now", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_signature **", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "new signature, in case of error NULL" - }, - { - "name": "name", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "name of the person" - }, - { - "name": "email", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "email of the person" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "now", - "cppFunctionName": "Now", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create a new action signature with a timestamp of 'now'.

\n" - }, - { - "cFunctionName": "git_signature_dup", - "args": [ - { - "name": "sig", - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "isSelf": true, - "comment": "signature to duplicated" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "dup", - "cppFunctionName": "Dup", - "return": { - "cType": "git_signature *", - "cppClassName": "GitSignature", - "comment": "a copy of sig, NULL on out of memory", - "jsClassName": "Signature" - }, - "description": "

Create a copy of an existing signature. All internal strings are also\nduplicated.

\n" - }, - { - "cFunctionName": "git_signature_free", - "args": [ - { - "name": "sig", - "cType": "git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "signature to free" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free an existing signature.

\n" - } - ] - }, - { - "filename": "stash.h", - "ignore": true, - "jsClassName": "Stash", - "cppClassName": "Stash", - "cType": "git_stash", - "freeFunctionName": "git_stash_free", - "functions": [ - { - "cFunctionName": "git_stash_save", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "comment": "Object id of the commit containing the stashed state. This commit is also the target of the direct reference refs/stash." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The owning repository." - }, - { - "name": "stasher", - "cType": "git_signature *", - "cppClassName": "GitSignature", - "jsClassName": "Signature", - "comment": "The identity of the person performing the stashing." - }, - { - "name": "message", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Optional description along with the stashed state." - }, - { - "name": "flags", - "cType": "unsigned int", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "Flags to control the stashing process. (see GIT_STASH_* above)" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "save", - "cppFunctionName": "Save", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ENOTFOUND where there's nothing to stash, or error code.", - "jsClassName": "Number" - }, - "description": "

Save the local modifications to a new stash.

\n" - }, - { - "cFunctionName": "git_stash_foreach", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository where to find the stash." - }, - { - "name": "callback", - "cType": "git_stash_cb", - "cppClassName": "StashCb", - "jsClassName": "StashCb", - "comment": "Callback to invoke per found stashed state. The most recent stash state will be enumerated first." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Extra parameter to callback function." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "foreach", - "cppFunctionName": "Foreach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Loop over all the stashed states and issue a callback for each one.

\n" - }, - { - "cFunctionName": "git_stash_drop", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The owning repository." - }, - { - "name": "index", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "The position within the stash list. 0 points to the most recent stashed state." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "drop", - "cppFunctionName": "Drop", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, or error code", - "jsClassName": "Number" - }, - "description": "

Remove a single stashed state from the stash list.

\n" - } - ] - }, - { - "filename": "status.h", - "ignore": true, - "jsClassName": "Status", - "cppClassName": "Status", - "cType": "git_status", - "freeFunctionName": "git_status_free", - "functions": [ - { - "cFunctionName": "git_status_foreach", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "A repository object" - }, - { - "name": "callback", - "cType": "git_status_cb", - "cppClassName": "StatusCb", - "jsClassName": "StatusCb", - "comment": "The function to call on each file" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Pointer to pass through to callback function" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "foreach", - "cppFunctionName": "Foreach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Gather file statuses and run a callback for each one.

\n" - }, - { - "cFunctionName": "git_status_foreach_ext", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository object" - }, - { - "name": "opts", - "cType": "const git_status_options *", - "cppClassName": "StatusOptions", - "jsClassName": "StatusOptions", - "comment": "Status options structure" - }, - { - "name": "callback", - "cType": "git_status_cb", - "cppClassName": "StatusCb", - "jsClassName": "StatusCb", - "comment": "The function to call on each file" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Pointer to pass through to callback function" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "foreachExt", - "cppFunctionName": "ForeachExt", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_EUSER on non-zero callback, or error code", - "jsClassName": "Number" - }, - "description": "

Gather file status information and run callbacks as requested.

\n" - }, - { - "cFunctionName": "git_status_file", - "args": [ - { - "name": "status_flags", - "cType": "unsigned int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "The status value for the file" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "A repository object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The file to retrieve status for, rooted at the repo's workdir" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "file", - "cppFunctionName": "File", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, GIT_ENOTFOUND if the file is not found in the HEAD, index, and work tree, GIT_EINVALIDPATH if `path` points at a folder, GIT_EAMBIGUOUS if \"path\" matches multiple files, -1 on other error.", - "jsClassName": "Number" - }, - "description": "

Get file status for a single file.

\n" - }, - { - "cFunctionName": "git_status_should_ignore", - "args": [ - { - "name": "ignored", - "cType": "int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Boolean returning 0 if the file is not ignored, 1 if it is" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "A repository object" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The file to check ignores for, rooted at the repo's workdir." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "shouldIgnore", - "cppFunctionName": "ShouldIgnore", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if ignore rules could be processed for the file (regardless of whether it exists or not), or an error < 0 if they could not.", - "jsClassName": "Number" - }, - "description": "

Test if the ignore rules apply to a given file.

\n" - } - ] - }, - { - "filename": "stdint.h", - "ignore": true, - "jsClassName": "Stdint", - "cppClassName": "Stdint", - "cType": "git_stdint", - "freeFunctionName": "git_stdint_free", - "functions": [] - }, - { - "filename": "strarray.h", - "ignore": true, - "jsClassName": "Strarray", - "cppClassName": "Strarray", - "cType": "git_strarray", - "freeFunctionName": "git_strarray_free", - "functions": [ - { - "cFunctionName": "git_strarray_free", - "args": [ - { - "name": "array", - "cType": "git_strarray *", - "cppClassName": "Strarray", - "jsClassName": "Strarray", - "comment": "git_strarray from which to free string data" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Close a string array object

\n" - }, - { - "cFunctionName": "git_strarray_copy", - "args": [ - { - "name": "tgt", - "cType": "git_strarray *", - "cppClassName": "Strarray", - "jsClassName": "Strarray", - "isSelf": true, - "comment": "target" - }, - { - "name": "src", - "cType": "const git_strarray *", - "cppClassName": "Strarray", - "jsClassName": "Strarray", - "comment": "source" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "copy", - "cppFunctionName": "Copy", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, < 0 on allocation failure", - "jsClassName": "Number" - }, - "description": "

Copy a string array object from source to target.

\n" - } - ] - }, - { - "filename": "submodule.h", - "dependencies": [ - "../include/oid.h", - "../include/repo.h" - ], - "jsClassName": "Submodule", - "cppClassName": "GitSubmodule", - "cType": "git_submodule", - "freeFunctionName": "free", - "functions": [ - { - "cFunctionName": "git_submodule_foreach", - "args": [ - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository" - }, - { - "name": "callback", - "cType": "int (*)(git_submodule *sm, const char *name, void *payload)", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Function to be called with the name of each submodule. Return a non-zero value to terminate the iteration." - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Extra data to pass to callback" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "foreach", - "cppFunctionName": "Foreach", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, -1 on error, or non-zero return value of callback", - "jsClassName": "Number" - }, - "description": "

Iterate over all tracked submodules of a repository.

\n" - }, - { - "cFunctionName": "git_submodule_add_finalize", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "The submodule to finish adding." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addFinalize", - "cppFunctionName": "AddFinalize", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Resolve the setup of a new git submodule.

\n" - }, - { - "cFunctionName": "git_submodule_add_to_index", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "The submodule to add to the index" - }, - { - "name": "write_index", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Boolean if this should immediately write the index file. If you pass this as false, you will have to get the git_index and explicitly call `git_index_write()` on it to save the change." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "addToIndex", - "cppFunctionName": "AddToIndex", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 on failure", - "jsClassName": "Number" - }, - "description": "

Add current submodule HEAD commit to index of superproject.

\n" - }, - { - "cFunctionName": "git_submodule_save", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "The submodule to write." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "save", - "cppFunctionName": "Save", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 on failure.", - "jsClassName": "Number" - }, - "description": "

Write submodule settings to .gitmodules file.

\n" - }, - { - "cFunctionName": "git_submodule_owner", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "Pointer to submodule object" - } - ], - "ignore": "Never make public for memory allocation reasons", - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "owner", - "cppFunctionName": "Owner", - "return": { - "cType": "git_repository *", - "cppClassName": "GitRepo", - "comment": "Pointer to `git_repository`", - "jsClassName": "Repository" - }, - "description": "

Get the containing repository for a submodule.

\n" - }, - { - "cFunctionName": "git_submodule_name", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "Pointer to submodule object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "name", - "cppFunctionName": "Name", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "Pointer to the submodule name", - "jsClassName": "String" - }, - "description": "

Get the name of submodule.

\n" - }, - { - "cFunctionName": "git_submodule_path", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "Pointer to submodule object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "path", - "cppFunctionName": "Path", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "Pointer to the submodule path", - "jsClassName": "String" - }, - "description": "

Get the path to the submodule.

\n" - }, - { - "cFunctionName": "git_submodule_url", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "Pointer to submodule object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "url", - "cppFunctionName": "Url", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "Pointer to the submodule url", - "jsClassName": "String" - }, - "description": "

Get the URL for the submodule.

\n" - }, - { - "cFunctionName": "git_submodule_set_url", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "Pointer to the submodule object" - }, - { - "name": "url", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "URL that should be used for the submodule" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setUrl", - "cppFunctionName": "SetUrl", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 on failure", - "jsClassName": "Number" - }, - "description": "

Set the URL for the submodule.

\n" - }, - { - "cFunctionName": "git_submodule_index_id", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "Pointer to submodule object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "indexId", - "cppFunctionName": "IndexId", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "Pointer to git_oid or NULL if submodule is not in index.", - "jsClassName": "Oid" - }, - "description": "

Get the OID for the submodule in the index.

\n" - }, - { - "cFunctionName": "git_submodule_head_id", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "Pointer to submodule object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "headId", - "cppFunctionName": "HeadId", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "Pointer to git_oid or NULL if submodule is not in the HEAD.", - "jsClassName": "Oid" - }, - "description": "

Get the OID for the submodule in the current HEAD tree.

\n" - }, - { - "cFunctionName": "git_submodule_wd_id", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "Pointer to submodule object" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "wdId", - "cppFunctionName": "WdId", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "Pointer to git_oid or NULL if submodule is not checked out.", - "jsClassName": "Oid" - }, - "description": "

Get the OID for the submodule in the current working directory.

\n" - }, - { - "cFunctionName": "git_submodule_ignore", - "args": [], - "ignore": true, - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "ignore", - "cppFunctionName": "Ignore", - "return": { - "cType": "GIT_EXTERN(", - "cppClassName": "GIT_EXTERN(" - }, - "description": "

Get the ignore rule for the submodule.

\n" - }, - { - "cFunctionName": "git_submodule_set_ignore", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true - }, - { - "name": "ignore", - "cType": "git_submodule_ignore_t", - "cppClassName": "Uint32", - "jsClassName": "Uint32" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setIgnore", - "cppFunctionName": "SetIgnore", - "return": { - "cType": "git_submodule_ignore_t", - "cppClassName": "Uint32", - "comment": "old value for ignore", - "jsClassName": "Number" - }, - "description": "

Set the ignore rule for the submodule.

\n" - }, - { - "cFunctionName": "git_submodule_update", - "ignore": true, - "args": [], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "update", - "cppFunctionName": "Update", - "return": { - "cType": "GIT_EXTERN(", - "cppClassName": "GIT_EXTERN(" - }, - "description": "

Get the update rule for the submodule.

\n" - }, - { - "cFunctionName": "git_submodule_set_update", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true - }, - { - "name": "update", - "cType": "git_submodule_update_t", - "cppClassName": "Uint32", - "jsClassName": "Uint32" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setUpdate", - "cppFunctionName": "SetUpdate", - "return": { - "cType": "git_submodule_update_t", - "cppClassName": "SubmoduleUpdateT", - "comment": "old value for update" - }, - "description": "

Set the update rule for the submodule.

\n" - }, - { - "cFunctionName": "git_submodule_fetch_recurse_submodules", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "fetchRecurseSubmodules", - "cppFunctionName": "FetchRecurseSubmodules", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 if fetchRecurseSubmodules is false, 1 if true", - "jsClassName": "Number" - }, - "description": "

Read the fetchRecurseSubmodules rule for a submodule.

\n" - }, - { - "cFunctionName": "git_submodule_set_fetch_recurse_submodules", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "The submodule to modify" - }, - { - "name": "fetch_recurse_submodules", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Boolean value" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "setFetchRecurseSubmodules", - "cppFunctionName": "SetFetchRecurseSubmodules", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "old value for fetchRecurseSubmodules", - "jsClassName": "Number" - }, - "description": "

Set the fetchRecurseSubmodules rule for a submodule.

\n" - }, - { - "cFunctionName": "git_submodule_init", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "The submodule to write into the superproject config" - }, - { - "name": "overwrite", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "By default, existing entries will not be overwritten, but setting this to true forces them to be updated." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "init", - "cppFunctionName": "Init", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 on failure.", - "jsClassName": "Number" - }, - "description": "

Copy submodule info into ".git/config" file.

\n" - }, - { - "cFunctionName": "git_submodule_sync", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "sync", - "cppFunctionName": "Sync", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Copy submodule remote info into submodule repo.

\n" - }, - { - "cFunctionName": "git_submodule_open", - "args": [ - { - "name": "repo", - "cType": "git_repository **", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "isReturn": true, - "comment": "Pointer to the submodule repo which was opened" - }, - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "Submodule to be opened" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "open", - "cppFunctionName": "Open", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 if submodule repo could not be opened.", - "jsClassName": "Number" - }, - "description": "

Open the repository for a submodule.

\n" - }, - { - "cFunctionName": "git_submodule_reload", - "args": [ - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "reload", - "cppFunctionName": "Reload", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Reread submodule info from config, index, and HEAD.

\n" - }, - { - "cFunctionName": "git_submodule_status", - "args": [ - { - "name": "status", - "cType": "unsigned int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Combination of `GIT_SUBMODULE_STATUS` flags" - }, - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "isSelf": true, - "comment": "Submodule for which to get status" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "status", - "cppFunctionName": "Status", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 on error", - "jsClassName": "Number" - }, - "description": "

Get the status for a submodule.

\n" - }, - { - "cFunctionName": "git_submodule_location", - "args": [ - { - "name": "location_status", - "cType": "unsigned int *", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Combination of first four `GIT_SUBMODULE_STATUS` flags" - }, - { - "name": "submodule", - "cType": "git_submodule *", - "cppClassName": "GitSubmodule", - "jsClassName": "Submodule", - "comment": "Submodule for which to get status" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "location", - "cppFunctionName": "Location", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success, <0 on error", - "jsClassName": "Number" - }, - "description": "

Get the locations of submodule information.

\n" - } - ] - }, - { - "filename": "tag.h", - "dependencies": [ - "../include/oid.h", - "../include/repo.h", - "../include/object.h", - "../include/signature.h" - ], - "jsClassName": "Tag", - "cppClassName": "GitTag", - "cType": "git_tag", - "freeFunctionName": "git_tag_free", - "functions": [ - { - "cFunctionName": "git_tag_free", - "args": [ - { - "name": "tag", - "cType": "git_tag *", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "comment": "the tag to close" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Close an open tag

\n" - }, - { - "cFunctionName": "git_tag_id", - "args": [ - { - "name": "tag", - "cType": "const git_tag *", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "isSelf": true, - "comment": "a previously loaded tag." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "oid", - "cppFunctionName": "Oid", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "object identity for the tag.", - "jsClassName": "Oid" - }, - "description": "

Get the id of a tag.

\n" - }, - { - "cFunctionName": "git_tag_target", - "args": [ - { - "name": "target_out", - "cType": "git_object **", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isReturn": true, - "comment": "pointer where to store the target" - }, - { - "name": "tag", - "cType": "const git_tag *", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "isSelf": true, - "comment": "a previously loaded tag." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getTarget", - "cppFunctionName": "GetTarget", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Get the tagged object of a tag

\n" - }, - { - "cFunctionName": "git_tag_target_id", - "args": [ - { - "name": "tag", - "cType": "const git_tag *", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "isSelf": true, - "comment": "a previously loaded tag." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "targetId", - "cppFunctionName": "TargetId", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "pointer to the OID", - "jsClassName": "Oid" - }, - "description": "

Get the OID of the tagged object of a tag

\n" - }, - { - "cFunctionName": "git_tag_target_type", - "args": [ - { - "name": "tag", - "cType": "const git_tag *", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "isSelf": true, - "comment": "a previously loaded tag." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "targetType", - "cppFunctionName": "TargetType", - "return": { - "cType": "git_otype", - "cppClassName": "Int32", - "comment": "type of the tagged object", - "jsClassName": "Number" - }, - "description": "

Get the type of a tag's tagged object

\n" - }, - { - "cFunctionName": "git_tag_name", - "args": [ - { - "name": "tag", - "cType": "const git_tag *", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "isSelf": true, - "comment": "a previously loaded tag." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "name", - "cppFunctionName": "Name", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "name of the tag", - "jsClassName": "String" - }, - "description": "

Get the name of a tag

\n" - }, - { - "cFunctionName": "git_tag_tagger", - "args": [ - { - "name": "tag", - "cType": "const git_tag *", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "isSelf": true, - "comment": "a previously loaded tag." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "tagger", - "cppFunctionName": "Tagger", - "return": { - "cType": "const git_signature *", - "cppClassName": "GitSignature", - "copy": "git_signature_dup", - "comment": "reference to the tag's author or NULL when unspecified", - "jsClassName": "Signature" - }, - "description": "

Get the tagger (author) of a tag

\n" - }, - { - "cFunctionName": "git_tag_message", - "args": [ - { - "name": "tag", - "cType": "const git_tag *", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "isSelf": true, - "comment": "a previously loaded tag." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "message", - "cppFunctionName": "Message", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "message of the tag or NULL when unspecified", - "jsClassName": "String" - }, - "description": "

Get the message of a tag

\n" - }, - { - "cFunctionName": "git_tag_create_frombuffer", - "args": [ - { - "name": "oid", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "Pointer where to store the OID of the newly created tag" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository where to store the tag" - }, - { - "name": "buffer", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Raw tag data" - }, - { - "name": "force", - "cType": "int", - "cppClassName": "Int32", - "jsClassName": "Number", - "comment": "Overwrite existing tags" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "createFrombuffer", - "cppFunctionName": "CreateFrombuffer", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; error code otherwise", - "jsClassName": "Number" - }, - "description": "

Create a new tag in the repository from a buffer

\n" - }, - { - "cFunctionName": "git_tag_peel", - "args": [ - { - "name": "tag_target_out", - "cType": "git_object **", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isReturn": true, - "comment": "Pointer to the peeled git_object" - }, - { - "name": "tag", - "cType": "const git_tag *", - "cppClassName": "GitTag", - "jsClassName": "Tag", - "comment": "_target_out Pointer to the peeled git_object" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "peel", - "cppFunctionName": "Peel", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Recursively peel a tag until a non-tag git_object is found

\n" - } - ] - }, - { - "filename": "threads.h", - "dependencies": [], - "jsClassName": "Threads", - "cppClassName": "GitThreads", - "functions": [ - { - "cFunctionName": "git_threads_init", - "args": [], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "init", - "cppFunctionName": "Init", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Init the threading system.

\n" - }, - { - "cFunctionName": "git_threads_shutdown", - "args": [], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "shutdown", - "cppFunctionName": "Shutdown", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Shutdown the threading system.

\n" - } - ] - }, - { - "filename": "trace.h", - "ignore": true, - "jsClassName": "Trace", - "cppClassName": "Trace", - "cType": "git_trace", - "freeFunctionName": "git_trace_free", - "functions": [ - { - "cFunctionName": "git_trace_set", - "args": [ - { - "name": "level", - "cType": "git_trace_level_t", - "cppClassName": "TraceLevelT", - "jsClassName": "TraceLevelT", - "comment": "Level to set tracing to" - }, - { - "name": "cb", - "cType": "git_trace_callback", - "cppClassName": "TraceCallback", - "jsClassName": "TraceCallback", - "comment": "Function to call with trace data" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "set", - "cppFunctionName": "Set", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Sets the system tracing configuration to the specified level with the\nspecified callback. When system events occur at a level equal to, or\nlower than, the given level they will be reported to the given callback.

\n" - } - ] - }, - { - "filename": "transport.h", - "ignore": true, - "jsClassName": "Transport", - "cppClassName": "Transport", - "cType": "git_transport", - "freeFunctionName": "git_transport_free", - "functions": [ - { - "cFunctionName": "git_cred_userpass_plaintext_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_cred **", - "cppClassName": "Cred", - "jsClassName": "Cred", - "comment": "The newly created credential object." - }, - { - "name": "username", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The username of the credential." - }, - { - "name": "password", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The password of the credential." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitCredUserpassPlaintextNew", - "cppFunctionName": "GitCredUserpassPlaintextNew", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 for success or an error code for failure", - "jsClassName": "Number" - }, - "description": "

Creates a new plain-text username and password credential object.\nThe supplied credential parameter will be internally duplicated.

\n" - }, - { - "cFunctionName": "git_transport_new", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_transport **", - "cppClassName": "Transport", - "jsClassName": "Transport", - "comment": "The newly created transport (out)" - }, - { - "name": "owner", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "comment": "The git_remote which will own this transport" - }, - { - "name": "url", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "The URL to connect to" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "new", - "cppFunctionName": "New", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Function to use to create a transport from a URL. The transport database\nis scanned to find a transport that implements the scheme of the URI (i.e.\ngit:// or http://) and a transport object is returned to the caller.

\n" - }, - { - "cFunctionName": "git_transport_dummy", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_transport **", - "cppClassName": "Transport", - "jsClassName": "Transport", - "comment": "The newly created transport (out)" - }, - { - "name": "owner", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "comment": "The git_remote which will own this transport" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "You must pass NULL for this parameter." - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "dummy", - "cppFunctionName": "Dummy", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create an instance of the dummy transport.

\n" - }, - { - "cFunctionName": "git_transport_local", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_transport **", - "cppClassName": "Transport", - "jsClassName": "Transport", - "comment": "The newly created transport (out)" - }, - { - "name": "owner", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "comment": "The git_remote which will own this transport" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "You must pass NULL for this parameter." - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "local", - "cppFunctionName": "Local", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create an instance of the local transport.

\n" - }, - { - "cFunctionName": "git_transport_smart", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_transport **", - "cppClassName": "Transport", - "jsClassName": "Transport", - "comment": "The newly created transport (out)" - }, - { - "name": "owner", - "cType": "git_remote *", - "cppClassName": "GitRemote", - "jsClassName": "Remote", - "comment": "The git_remote which will own this transport" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "A pointer to a git_smart_subtransport_definition" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "smart", - "cppFunctionName": "Smart", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create an instance of the smart transport.

\n" - }, - { - "cFunctionName": "git_smart_subtransport_http", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_smart_subtransport **", - "cppClassName": "SmartSubtransport", - "jsClassName": "SmartSubtransport", - "comment": "The newly created subtransport" - }, - { - "name": "owner", - "cType": "git_transport*", - "cppClassName": "Transport*", - "jsClassName": "Transport*", - "comment": "The smart transport to own this subtransport" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitSmartSubtransportHttp", - "cppFunctionName": "GitSmartSubtransportHttp", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create an instance of the http subtransport. This subtransport\nalso supports https. On Win32, this subtransport may be implemented\nusing the WinHTTP library.

\n" - }, - { - "cFunctionName": "git_smart_subtransport_git", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_smart_subtransport **", - "cppClassName": "SmartSubtransport", - "jsClassName": "SmartSubtransport", - "comment": "The newly created subtransport" - }, - { - "name": "owner", - "cType": "git_transport*", - "cppClassName": "Transport*", - "jsClassName": "Transport*", - "comment": "The smart transport to own this subtransport" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "gitSmartSubtransportGit", - "cppFunctionName": "GitSmartSubtransportGit", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Create an instance of the git subtransport.

\n" - } - ] - }, - { - "filename": "tree_entry.h", - "dependencies": [ - "../include/oid.h", - "../include/repo.h", - "../include/object.h" - ], - "jsClassName": "TreeEntry", - "cppClassName": "GitTreeEntry", - "cType": "git_tree_entry", - "freeFunctionName": "git_tree_entry_free", - "functions": [ - { - "cFunctionName": "git_tree_entry_dup", - "args": [ - { - "name": "entry", - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "isSelf": true, - "comment": "A tree entry to duplicate" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "dup", - "cppFunctionName": "Dup", - "return": { - "cType": "git_tree_entry *", - "cppClassName": "GitTreeEntry", - "comment": "a copy of the original entry or NULL on error (alloc failure)", - "jsClassName": "TreeEntry" - }, - "description": "

Duplicate a tree entry

\n" - }, - { - "cFunctionName": "git_tree_entry_free", - "args": [ - { - "name": "entry", - "cType": "git_tree_entry *", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "isSelf": true, - "comment": "The entry to free" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free a user-owned tree entry

\n" - }, - { - "cFunctionName": "git_tree_entry_name", - "args": [ - { - "name": "entry", - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "isSelf": true, - "comment": "a tree entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "name", - "cppFunctionName": "Name", - "return": { - "cType": "const char *", - "cppClassName": "String", - "comment": "the name of the file", - "jsClassName": "String" - }, - "description": "

Get the filename of a tree entry

\n" - }, - { - "cFunctionName": "git_tree_entry_id", - "args": [ - { - "name": "entry", - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "isSelf": true, - "comment": "a tree entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "oid", - "cppFunctionName": "Oid", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "the oid of the object", - "jsClassName": "Oid" - }, - "description": "

Get the id of the object pointed by the entry

\n" - }, - { - "cFunctionName": "git_tree_entry_type", - "args": [ - { - "name": "entry", - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "isSelf": true, - "comment": "a tree entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "type", - "cppFunctionName": "Type", - "return": { - "cType": "git_otype", - "cppClassName": "Number", - "comment": "the type of the pointed object", - "jsClassName": "Number" - }, - "description": "

Get the type of the object pointed by the entry

\n" - }, - { - "cFunctionName": "git_tree_entry_filemode", - "args": [ - { - "name": "entry", - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "isSelf": true, - "comment": "a tree entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "filemode", - "cppFunctionName": "filemode", - "return": { - "cType": "git_filemode_t", - "cppClassName": "Number", - "comment": "filemode as an integer", - "jsClassName": "Number" - }, - "description": "

Get the UNIX file attributes of a tree entry

\n" - }, - { - "cFunctionName": "git_tree_entry_cmp", - "args": [ - { - "name": "e1", - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "isSelf": true, - "comment": "first tree entry" - }, - { - "name": "e2", - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "comment": "second tree entry" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "compare", - "cppFunctionName": "Compare", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "<0 if e1 is before e2, 0 if e1 == e2, >0 if e1 is after e2", - "jsClassName": "Number" - }, - "description": "

Compare two tree entries

\n" - }, - { - "cFunctionName": "git_tree_entry_to_object", - "args": [ - { - "name": "object_out", - "cType": "git_object **", - "cppClassName": "GitObject", - "jsClassName": "Object", - "isReturn": true, - "comment": "pointer to the converted object" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "repository where to lookup the pointed object" - }, - { - "name": "entry", - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "isSelf": true, - "comment": "a tree entry" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getObject", - "cppFunctionName": "GetObject", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Convert a tree entry to the git_object it points to.

\n" - } - ] - }, - { - "filename": "tree.h", - "dependencies": [ - "../include/repo.h", - "../include/oid.h", - "../include/tree_entry.h", - "../include/diff_list.h", - "../include/diff_options.h", - "../include/tree_builder.h", - "../include/index.h" - ], - "jsClassName": "Tree", - "cppClassName": "GitTree", - "cType": "git_tree", - "freeFunctionName": "git_tree_free", - "functions": [ - { - "cFunctionName": "git_tree_free", - "args": [ - { - "name": "tree", - "cType": "git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "comment": "The tree to close" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "free", - "cppFunctionName": "Free", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Close an open tree

\n" - }, - { - "cFunctionName": "git_tree_id", - "args": [ - { - "name": "tree", - "cType": "const git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "a previously loaded tree." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "oid", - "cppFunctionName": "Oid", - "return": { - "cType": "const git_oid *", - "cppClassName": "GitOid", - "copy": "git_oid_dup", - "comment": "object identity for the tree.", - "jsClassName": "Oid" - }, - "description": "

Get the id of a tree.

\n" - }, - { - "cFunctionName": "git_tree_owner", - "args": [ - { - "name": "tree", - "cType": "const git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "A previously loaded tree." - } - ], - "ignore": "Never make public for memory allocation reasons", - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "owner", - "cppFunctionName": "Owner", - "return": { - "cType": "git_repository *", - "cppClassName": "GitRepo", - "comment": "Repository that contains this tree.", - "jsClassName": "Repository" - }, - "description": "

Get the repository that contains the tree.

\n" - }, - { - "cFunctionName": "git_tree_entrycount", - "args": [ - { - "name": "tree", - "cType": "const git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "a previously loaded tree." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "size", - "cppFunctionName": "Size", - "return": { - "cType": "size_t", - "cppClassName": "Uint32", - "comment": "the number of entries in the tree", - "jsClassName": "Number" - }, - "description": "

Get the number of entries listed in a tree

\n" - }, - { - "cFunctionName": "git_tree_entry_byname", - "args": [ - { - "name": "tree", - "cType": "git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "a previously loaded tree." - }, - { - "name": "filename", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "the filename of the desired entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "entryByName", - "cppFunctionName": "EntryByName", - "return": { - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "copy": "git_tree_entry_dup", - "comment": "the tree entry; NULL if not found", - "jsClassName": "TreeEntry" - }, - "description": "

Lookup a tree entry by its filename

\n" - }, - { - "cFunctionName": "git_tree_entry_byindex", - "args": [ - { - "name": "tree", - "cType": "git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "a previously loaded tree." - }, - { - "name": "idx", - "cType": "size_t", - "cppClassName": "Uint32", - "jsClassName": "Number", - "comment": "the position in the entry list" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "entryByIndex", - "cppFunctionName": "EntryByIndex", - "return": { - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "copy": "git_tree_entry_dup", - "comment": "the tree entry; NULL if not found", - "jsClassName": "TreeEntry" - }, - "description": "

Lookup a tree entry by its position in the tree

\n" - }, - { - "cFunctionName": "git_tree_entry_byoid", - "args": [ - { - "name": "tree", - "cType": "const git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "a previously loaded tree." - }, - { - "name": "oid", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "the sha being looked for" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "entryByOid", - "cppFunctionName": "EntryByOid", - "return": { - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "copy": "git_tree_entry_dup", - "comment": "the tree entry; NULL if not found", - "jsClassName": "TreeEntry" - }, - "description": "

Lookup a tree entry by SHA value.

\n" - }, - { - "cFunctionName": "git_tree_entry_bypath", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_tree_entry **", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "comment": "Pointer where to store the tree entry" - }, - { - "name": "root", - "cType": "git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "Previously loaded tree which is the root of the relative path" - }, - { - "name": "path", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Path to the contained entry" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "getEntry", - "cppFunctionName": "GetEntry", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; GIT_ENOTFOUND if the path does not exist", - "jsClassName": "Number" - }, - "description": "

Retrieve a tree entry contained in a tree or in any of its subtrees,\ngiven its relative path.

\n" - }, - { - "cFunctionName": "git_treebuilder_create", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_treebuilder **", - "cppClassName": "GitTreeBuilder", - "jsClassName": "TreeBuilder", - "comment": "Pointer where to store the tree builder" - }, - { - "name": "source", - "cType": "const git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "Source tree to initialize the builder (optional)" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "builder", - "cppFunctionName": "Builder", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; error code otherwise", - "jsClassName": "Number" - }, - "description": "

Create a new tree builder.

\n" - }, - { - "cFunctionName": "git_tree_walk", - "args": [ - { - "name": "tree", - "cType": "const git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "The tree to walk" - }, - { - "name": "mode", - "cType": "git_treewalk_mode", - "cppClassName": "TreewalkMode", - "jsClassName": "TreewalkMode", - "comment": "Traversal mode (pre or post-order)" - }, - { - "name": "callback", - "cType": "git_treewalk_cb", - "cppClassName": "TreewalkCb", - "jsClassName": "TreewalkCb", - "comment": "Function to call on each tree entry" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Opaque pointer to be passed on each callback" - } - ], - "ignore": true, - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "walk", - "cppFunctionName": "Walk", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Traverse the entries in a tree and its subtrees in post or pre order.

\n" - }, - { - "cFunctionName": "git_diff_tree_to_tree", - "args": [ - { - "name": "diff", - "cType": "git_diff_list **", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "isReturn": true, - "comment": "Output pointer to a git_diff_list pointer to be allocated." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository containing the trees." - }, - { - "name": "old_tree", - "cType": "git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "A git_tree object to diff from, or NULL for empty tree." - }, - { - "name": "new_tree", - "cType": "git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "comment": "A git_tree object to diff to, or NULL for empty tree." - }, - { - "name": "opts", - "cType": "const git_diff_options *", - "cppClassName": "GitDiffOptions", - "jsClassName": "DiffOptions", - "isOptional": true, - "comment": "Structure with options to influence diff or NULL for defaults." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "diffTree", - "cppFunctionName": "DiffTree", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Create a diff list with the difference between two tree objects.

\n" - }, - { - "cFunctionName": "git_diff_tree_to_index", - "args": [ - { - "name": "diff", - "cType": "git_diff_list **", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "isReturn": true, - "comment": "Output pointer to a git_diff_list pointer to be allocated." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository containing the tree and index." - }, - { - "name": "old_tree", - "cType": "git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "A git_tree object to diff from, or NULL for empty tree." - }, - { - "name": "index", - "cType": "git_index *", - "cppClassName": "GitIndex", - "jsClassName": "Index", - "isOptional": true, - "comment": "The index to diff with; repo index used if NULL." - }, - { - "name": "opts", - "cType": "const git_diff_options *", - "cppClassName": "GitDiffOptions", - "jsClassName": "DiffOptions", - "isOptional": true, - "comment": "Structure with options to influence diff or NULL for defaults." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "diffIndex", - "cppFunctionName": "DiffIndex", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Create a diff list between a tree and repository index.

\n" - }, - { - "cFunctionName": "git_diff_tree_to_workdir", - "args": [ - { - "name": "diff", - "cType": "git_diff_list **", - "cppClassName": "GitDiffList", - "jsClassName": "DiffList", - "isReturn": true, - "comment": "A pointer to a git_diff_list pointer that will be allocated." - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "The repository containing the tree." - }, - { - "name": "old_tree", - "cType": "git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isSelf": true, - "comment": "A git_tree object to diff from, or NULL for empty tree." - }, - { - "name": "opts", - "cType": "const git_diff_options *", - "cppClassName": "GitDiffOptions", - "jsClassName": "DiffOptions", - "isOptional": true, - "comment": "Structure with options to influence diff or NULL for defaults." - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "diffWorkDir", - "cppFunctionName": "DiffWorkDir", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Create a diff list between a tree and the working directory.

\n" - } - ] - }, - { - "filename": "tree_builder.h", - "dependencies": [ - "../include/repo.h", - "../include/oid.h", - "../include/tree_entry.h", - "../include/tree.h", - "../include/diff_list.h", - "../include/diff_options.h", - "../include/index.h" - ], - "jsClassName": "TreeBuilder", - "cppClassName": "GitTreeBuilder", - "cType": "git_treebuilder", - "freeFunctionName": "git_treebuilder_free", - "functions": [ - { - "cFunctionName": "git_treebuilder_create", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "git_treebuilder **", - "cppClassName": "GitTreeBuilder", - "jsClassName": "TreeBuilder", - "comment": "Pointer where to store the tree builder" - }, - { - "name": "source", - "cType": "const git_tree *", - "cppClassName": "GitTree", - "jsClassName": "Tree", - "isOptional": true, - "comment": "Source tree to initialize the builder (optional)" - } - ], - "isAsync": false, - "isConstructorMethod": true, - "isPrototypeMethod": false, - "jsFunctionName": "create", - "cppFunctionName": "Create", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 on success; error code otherwise", - "jsClassName": "Number" - }, - "description": "

Create a new tree builder.

\n" - }, - { - "cFunctionName": "git_treebuilder_clear", - "args": [ - { - "name": "bld", - "cType": "git_treebuilder *", - "cppClassName": "GitTreeBuilder", - "jsClassName": "TreeBuilder", - "comment": "Builder to clear" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "clear", - "cppFunctionName": "Clear", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Clear all the entires in the builder

\n" - }, - { - "cFunctionName": "git_treebuilder_entrycount", - "args": [ - { - "name": "bld", - "cType": "git_treebuilder *", - "cppClassName": "GitTreeBuilder", - "jsClassName": "TreeBuilder", - "isSelf": true, - "comment": "a previously loaded treebuilder." - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "size", - "cppFunctionName": "Size", - "return": { - "cType": "unsigned int", - "cppClassName": "Uint32", - "comment": "the number of entries in the treebuilder", - "jsClassName": "Number" - }, - "description": "

Get the number of entries listed in a treebuilder

\n" - }, - { - "cFunctionName": "git_treebuilder_free", - "args": [ - { - "name": "bld", - "cType": "git_treebuilder *", - "cppClassName": "GitTreeBuilder", - "jsClassName": "TreeBuilder", - "comment": "Builder to free" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "isFree": true, - "jsFunctionName": "treebuilderFree", - "cppFunctionName": "GitTreebuilderFree", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Free a tree builder

\n" - }, - { - "cFunctionName": "git_treebuilder_get", - "args": [ - { - "name": "bld", - "cType": "git_treebuilder *", - "cppClassName": "GitTreeBuilder", - "jsClassName": "TreeBuilder", - "isSelf": true, - "comment": "Tree builder" - }, - { - "name": "filename", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Name of the entry" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "get", - "cppFunctionName": "Get", - "return": { - "cType": "const git_tree_entry *", - "cppClassName": "GitTreeEntry", - "copy": "git_tree_entry_dup", - "comment": "pointer to the entry; NULL if not found", - "jsClassName": "TreeEntry" - }, - "description": "

Get an entry from the builder from its filename

\n" - }, - { - "cFunctionName": "git_treebuilder_insert", - "args": [ - { - "name": "out", - "isReturn": true, - "cType": "const git_tree_entry **", - "cppClassName": "GitTreeEntry", - "jsClassName": "TreeEntry", - "copy": "git_tree_entry_dup", - "comment": "Pointer to store the entry (optional)" - }, - { - "name": "bld", - "cType": "git_treebuilder *", - "cppClassName": "GitTreeBuilder", - "jsClassName": "TreeBuilder", - "isSelf": true, - "comment": "Tree builder" - }, - { - "name": "filename", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Filename of the entry" - }, - { - "name": "id", - "cType": "const git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "comment": "SHA1 oid of the entry" - }, - { - "name": "filemode", - "cType": "git_filemode_t", - "cppClassName": "Number", - "jsClassName": "Number", - "comment": "Folder attributes of the entry. This parameter must be valued with one of the following entries: 0040000, 0100644, 0100755, 0120000 or 0160000.", - "additionalCast": "(int)" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "insert", - "cppFunctionName": "Insert", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "comment": "0 or an error code", - "jsClassName": "Number" - }, - "description": "

Add or update an entry to the builder

\n" - }, - { - "cFunctionName": "git_treebuilder_remove", - "args": [ - { - "name": "bld", - "cType": "git_treebuilder *", - "cppClassName": "GitTreeBuilder", - "jsClassName": "TreeBuilder", - "isSelf": true, - "comment": "Tree builder" - }, - { - "name": "filename", - "cType": "const char *", - "cppClassName": "String", - "jsClassName": "String", - "comment": "Filename of the entry to remove" - } - ], - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "gitTreebuilderRemove", - "cppFunctionName": "GitTreebuilderRemove", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number" - }, - "description": "

Remove an entry from the builder by its filename

\n" - }, - { - "cFunctionName": "git_treebuilder_filter", - "args": [ - { - "name": "bld", - "cType": "git_treebuilder *", - "cppClassName": "GitTreeBuilder", - "jsClassName": "TreeBuilder", - "isSelf": true, - "comment": "Tree builder" - }, - { - "name": "filter", - "cType": "git_treebuilder_filter_cb", - "cppClassName": "TreebuilderFilterCb", - "jsClassName": "TreebuilderFilterCb", - "comment": "Callback to filter entries" - }, - { - "name": "payload", - "cType": "void *", - "cppClassName": "void", - "jsClassName": "void", - "comment": "Extra data to pass to filter" - } - ], - "ignore": true, - "isAsync": false, - "isConstructorMethod": false, - "isPrototypeMethod": false, - "jsFunctionName": "filter", - "cppFunctionName": "Filter", - "return": { - "cType": "void", - "cppClassName": "void", - "jsClassName": "void" - }, - "description": "

Filter the entries in the tree

\n" - }, - { - "cFunctionName": "git_treebuilder_write", - "args": [ - { - "name": "id", - "cType": "git_oid *", - "cppClassName": "GitOid", - "jsClassName": "Oid", - "shouldAlloc": true, - "isReturn": true, - "comment": "Pointer to store the OID of the newly written tree" - }, - { - "name": "repo", - "cType": "git_repository *", - "cppClassName": "GitRepo", - "jsClassName": "Repository", - "comment": "Repository in which to store the object" - }, - { - "name": "bld", - "cType": "git_treebuilder *", - "cppClassName": "GitTreeBuilder", - "jsClassName": "TreeBuilder", - "isSelf": true, - "comment": "Tree builder to write" - } - ], - "isAsync": true, - "isConstructorMethod": false, - "isPrototypeMethod": true, - "jsFunctionName": "write", - "cppFunctionName": "Write", - "return": { - "cType": "int", - "cppClassName": "Int32", - "isErrorCode": true, - "jsClassName": "Number", - "comment": "0 or an error code" - }, - "description": "

Write the contents of the tree builder as a tree object

\n" - } - ] - } -] diff --git a/doc/Theme.css b/doc/Theme.css deleted file mode 100644 index 111dd6573..000000000 --- a/doc/Theme.css +++ /dev/null @@ -1,796 +0,0 @@ -@import('http://fonts.googleapis.com/css?family=EB+Garamond'); - -body { - background-color: #FFFFFF; - font-family: Georgia, sans-serif; - font-size: 14px; - margin: 40px; -} - -a:link, -a:visited { color: #900000; text-decoration: none } -a:hover { color: #900000; text-decoration: underline } -a:active { color: #FF0000; text-decoration: underline } - -td { - vertical-align: top } - -img { border: 0; } - - -/* - Comment out this line to use web-style paragraphs (blank line between - paragraphs, no indent) instead of print-style paragraphs (no blank line, - indented.) -*/ -p { - text-indent: 5ex; margin: 0 } - - -/* Opera doesn't break with just wbr, but will if you add this. */ -.Opera wbr:after { - content: "\00200B"; - } - - -/* Blockquotes are used as containers for things that may need to scroll. */ -blockquote { - padding: 0; - margin: 0; - overflow: auto; - } - - -.Firefox1 blockquote { - padding-bottom: .5em; - } - -/* Turn off scrolling when printing. 
*/ -@media print { - blockquote { - overflow: visible; - } - .IE blockquote { - width: auto; - } - } - - - -#Menu { - padding: 10px 0 0 0; - } -.ContentPage #Menu, -.IndexPage #Menu { - position: absolute; - top: 0; - left: 0; - width: 31ex; - overflow: hidden; - } -.ContentPage .Firefox #Menu, -.IndexPage .Firefox #Menu { - width: 27ex; - } - - - .MTitle { - font-size: 16pt; font-weight: bold; font-variant: small-caps; - text-align: center; - padding: 5px 10px 15px 10px; - border-bottom: 1px dotted #000000; - margin-bottom: 15px } - - .MSubTitle { - font-size: 9pt; font-weight: normal; font-variant: normal; - margin-top: 1ex; margin-bottom: 5px } - - - .MEntry a:link, - .MEntry a:hover, - .MEntry a:visited { color: #606060; margin-right: 0 } - .MEntry a:active { color: #A00000; margin-right: 0 } - - - .MGroup { - font-variant: small-caps; font-weight: bold; - margin: 1em 0 1em 10px; - } - - .MGroupContent { - font-variant: normal; font-weight: normal } - - .MGroup a:link, - .MGroup a:hover, - .MGroup a:visited { color: #545454; margin-right: 10px } - .MGroup a:active { color: #A00000; margin-right: 10px } - - - .MFile, - .MText, - .MLink, - .MIndex { - padding: 1px 17px 2px 10px; - margin: .25em 0 .25em 0; - } - - .MText { - font-size: 8pt; font-style: italic } - - .MLink { - font-style: italic } - - #MSelected { - color: #000000; background-color: #FFFFFF; - /* Replace padding with border. */ - padding: 0 10px 0 10px; - border-width: 1px 2px 2px 0; border-style: solid; border-color: #000000; - margin-right: 5px; - } - - /* Close off the left side when its in a group. */ - .MGroup #MSelected { - padding-left: 9px; border-left-width: 1px } - - /* A treat for Mozilla users. Blatantly non-standard. Will be replaced with CSS 3 attributes when finalized/supported. 
*/ - .Firefox #MSelected { - -moz-border-radius-topright: 10px; - -moz-border-radius-bottomright: 10px } - .Firefox .MGroup #MSelected { - -moz-border-radius-topleft: 10px; - -moz-border-radius-bottomleft: 10px } - - - #MSearchPanel { - padding: 0px 6px; - margin: .25em 0; - } - - - #MSearchField { - font: italic 9pt Verdana, sans-serif; - color: #606060; - background-color: #E8E8E8; - border: none; - padding: 2px 4px; - width: 100%; - } - /* Only Opera gets it right. */ - .Firefox #MSearchField, - .IE #MSearchField, - .Safari #MSearchField { - width: 94%; - } - .Opera9 #MSearchField, - .Konqueror #MSearchField { - width: 97%; - } - .FramedMenuPage .Firefox #MSearchField, - .FramedMenuPage .Safari #MSearchField, - .FramedMenuPage .Konqueror #MSearchField { - width: 98%; - } - - /* Firefox doesn't do this right in frames without #MSearchPanel added on. - It's presence doesn't hurt anything other browsers. */ - #MSearchPanel.MSearchPanelInactive:hover #MSearchField { - background-color: #FFFFFF; - border: 1px solid #C0C0C0; - padding: 1px 3px; - } - .MSearchPanelActive #MSearchField { - background-color: #FFFFFF; - border: 1px solid #C0C0C0; - font-style: normal; - padding: 1px 3px; - } - - #MSearchType { - visibility: hidden; - font: 8pt Verdana, sans-serif; - width: 98%; - padding: 0; - border: 1px solid #C0C0C0; - } - .MSearchPanelActive #MSearchType, - /* As mentioned above, Firefox doesn't do this right in frames without #MSearchPanel added on. 
*/ - #MSearchPanel.MSearchPanelInactive:hover #MSearchType, - #MSearchType:focus { - visibility: visible; - color: #606060; - } - #MSearchType option#MSearchEverything { - font-weight: bold; - } - - .Opera8 .MSearchPanelInactive:hover, - .Opera8 .MSearchPanelActive { - margin-left: -1px; - } - - - iframe#MSearchResults { - width: 60ex; - height: 15em; - } - #MSearchResultsWindow { - display: none; - position: absolute; - left: 0; top: 0; - border: 1px solid #000000; - background-color: #E8E8E8; - } - #MSearchResultsWindowClose { - font-weight: bold; - font-size: 8pt; - display: block; - padding: 2px 5px; - } - #MSearchResultsWindowClose:link, - #MSearchResultsWindowClose:visited { - color: #000000; - text-decoration: none; - } - #MSearchResultsWindowClose:active, - #MSearchResultsWindowClose:hover { - color: #800000; - text-decoration: none; - background-color: #F4F4F4; - } - - - - -#Content { - padding-bottom: 15px; - } - -.ContentPage #Content { - border-width: 0 0 1px 1px; - border-style: solid; - border-color: #000000; - background-color: #FFFFFF; - font-size: 9pt; /* To make 31ex match the menu's 31ex. 
*/ - margin-left: 31ex; - } -.ContentPage .Firefox #Content { - margin-left: 27ex; - } - - - - .CTopic { - font-size: 10pt; - margin-bottom: 3em; - } - - - .CTitle { - font-size: 12pt; font-weight: bold; - border-width: 0 0 1px 0; border-style: solid; border-color: #A0A0A0; - margin: 0 15px .5em 15px } - - .CGroup .CTitle { - font-size: 16pt; font-variant: small-caps; - padding-left: 15px; padding-right: 15px; - border-width: 0 0 2px 0; border-color: #000000; - margin-left: 0; margin-right: 0 } - - .CClass .CTitle, - .CInterface .CTitle, - .CDatabase .CTitle, - .CDatabaseTable .CTitle, - .CSection .CTitle { - font-size: 18pt; - color: #FFFFFF; background-color: #A0A0A0; - padding: 10px 15px 10px 15px; - border-width: 2px 0; border-color: #000000; - margin-left: 0; margin-right: 0 } - - #MainTopic .CTitle { - font-size: 20pt; - color: #FFFFFF; background-color: #7070C0; - padding: 10px 15px 10px 15px; - border-width: 0 0 3px 0; border-color: #000000; - margin-left: 0; margin-right: 0 } - - .CBody { - margin-left: 15px; margin-right: 15px } - - - .CToolTip { - position: absolute; visibility: hidden; - left: 0; top: 0; - background-color: #FFFFE0; - padding: 5px; - border-width: 1px 2px 2px 1px; border-style: solid; border-color: #000000; - font-size: 8pt; - } - - .Opera .CToolTip { - max-width: 98%; - } - - /* Scrollbars would be useless. */ - .CToolTip blockquote { - overflow: hidden; - } - .IE6 .CToolTip blockquote { - overflow: visible; - } - - .CHeading { - font-weight: bold; font-size: 10pt; - margin: 1.5em 0 .5em 0; - } - - .CBody pre { - font: 10pt "Courier New", Courier, monospace; - background-color: #FCFCFC; - margin: 1em 35px; - padding: 10px 15px 10px 10px; - border-color: #E0E0E0 #E0E0E0 #E0E0E0 #E4E4E4; - border-width: 1px 1px 1px 6px; - border-style: dashed dashed dashed solid; - } - - .CBody ul { - /* I don't know why CBody's margin doesn't apply, but it's consistent across browsers so whatever. - Reapply it here as padding. 
*/ - padding-left: 15px; padding-right: 15px; - margin: .5em 5ex .5em 5ex; - } - - .CDescriptionList { - margin: .5em 5ex 0 5ex } - - .CDLEntry { - font: 10pt "Courier New", Courier, monospace; color: #808080; - padding-bottom: .25em; - white-space: nowrap } - - .CDLDescription { - font-size: 10pt; /* For browsers that don't inherit correctly, like Opera 5. */ - padding-bottom: .5em; padding-left: 5ex } - - - .CTopic img { - text-align: center; - display: block; - margin: 1em auto; - } - .CImageCaption { - font-variant: small-caps; - font-size: 8pt; - color: #808080; - text-align: center; - position: relative; - top: 1em; - } - - .CImageLink { - color: #808080; - font-style: italic; - } - a.CImageLink:link, - a.CImageLink:visited, - a.CImageLink:hover { color: #808080 } - - - - - -.Prototype { - font: 10pt "Courier New", Courier, monospace; - padding: 5px 3ex; - border-width: 1px; border-style: solid; - margin: 0 5ex 1.5em 5ex; - } - - .Prototype td { - font-size: 10pt; - } - - .PDefaultValue, - .PDefaultValuePrefix, - .PTypePrefix { - color: #8F8F8F; - } - .PTypePrefix { - text-align: right; - } - .PAfterParameters { - vertical-align: bottom; - } - - .IE .Prototype table { - padding: 0; - } - - .CFunction .Prototype { - background-color: #F4F4F4; border-color: #D0D0D0 } - .CProperty .Prototype { - background-color: #F4F4FF; border-color: #C0C0E8 } - .CVariable .Prototype { - background-color: #FFFFF0; border-color: #E0E0A0 } - - .CClass .Prototype { - border-width: 1px 2px 2px 1px; border-style: solid; border-color: #A0A0A0; - background-color: #F4F4F4; - } - .CInterface .Prototype { - border-width: 1px 2px 2px 1px; border-style: solid; border-color: #A0A0D0; - background-color: #F4F4FF; - } - - .CDatabaseIndex .Prototype, - .CConstant .Prototype { - background-color: #D0D0D0; border-color: #000000 } - .CType .Prototype, - .CEnumeration .Prototype { - background-color: #FAF0F0; border-color: #E0B0B0; - } - .CDatabaseTrigger .Prototype, - .CEvent .Prototype, - 
.CDelegate .Prototype { - background-color: #F0FCF0; border-color: #B8E4B8 } - - .CToolTip .Prototype { - margin: 0 0 .5em 0; - white-space: nowrap; - } - - - - - -.Summary { - margin: 1.5em 5ex 0 5ex } - - .STitle { - font-size: 12pt; font-weight: bold; - margin-bottom: .5em } - - - .SBorder { - background-color: #FFFFF0; - padding: 15px; - border: 1px solid #C0C060 } - - /* In a frame IE 6 will make them too long unless you set the width to 100%. Without frames it will be correct without a width - or slightly too long (but not enough to scroll) with a width. This arbitrary weirdness simply astounds me. IE 7 has the same - problem with frames, haven't tested it without. */ - .FramedContentPage .IE .SBorder { - width: 100% } - - /* A treat for Mozilla users. Blatantly non-standard. Will be replaced with CSS 3 attributes when finalized/supported. */ - .Firefox .SBorder { - -moz-border-radius: 20px } - - - .STable { - font-size: 9pt; width: 100% } - - .SEntry { - width: 30% } - .SDescription { - width: 70% } - - - .SMarked { - background-color: #F8F8D8 } - - .SDescription { padding-left: 2ex } - .SIndent1 .SEntry { padding-left: 1.5ex } .SIndent1 .SDescription { padding-left: 3.5ex } - .SIndent2 .SEntry { padding-left: 3.0ex } .SIndent2 .SDescription { padding-left: 5.0ex } - .SIndent3 .SEntry { padding-left: 4.5ex } .SIndent3 .SDescription { padding-left: 6.5ex } - .SIndent4 .SEntry { padding-left: 6.0ex } .SIndent4 .SDescription { padding-left: 8.0ex } - .SIndent5 .SEntry { padding-left: 7.5ex } .SIndent5 .SDescription { padding-left: 9.5ex } - - .SDescription a { color: #800000} - .SDescription a:active { color: #A00000 } - - .SGroup td { - padding-top: .5em; padding-bottom: .25em } - - .SGroup .SEntry { - font-weight: bold; font-variant: small-caps } - - .SGroup .SEntry a { color: #800000 } - .SGroup .SEntry a:active { color: #F00000 } - - - .SMain td, - .SClass td, - .SDatabase td, - .SDatabaseTable td, - .SSection td { - font-size: 10pt; - padding-bottom: .25em 
} - - .SClass td, - .SDatabase td, - .SDatabaseTable td, - .SSection td { - padding-top: 1em } - - .SMain .SEntry, - .SClass .SEntry, - .SDatabase .SEntry, - .SDatabaseTable .SEntry, - .SSection .SEntry { - font-weight: bold; - } - - .SMain .SEntry a, - .SClass .SEntry a, - .SDatabase .SEntry a, - .SDatabaseTable .SEntry a, - .SSection .SEntry a { color: #000000 } - - .SMain .SEntry a:active, - .SClass .SEntry a:active, - .SDatabase .SEntry a:active, - .SDatabaseTable .SEntry a:active, - .SSection .SEntry a:active { color: #A00000 } - - - - - -.ClassHierarchy { - margin: 0 15px 1em 15px } - - .CHEntry { - border-width: 1px 2px 2px 1px; border-style: solid; border-color: #A0A0A0; - margin-bottom: 3px; - padding: 2px 2ex; - font-size: 10pt; - background-color: #F4F4F4; color: #606060; - } - - .Firefox .CHEntry { - -moz-border-radius: 4px; - } - - .CHCurrent .CHEntry { - font-weight: bold; - border-color: #000000; - color: #000000; - } - - .CHChildNote .CHEntry { - font-style: italic; - font-size: 8pt; - } - - .CHIndent { - margin-left: 3ex; - } - - .CHEntry a:link, - .CHEntry a:visited, - .CHEntry a:hover { - color: #606060; - } - .CHEntry a:active { - color: #800000; - } - - - - - -#Index { - background-color: #FFFFFF; - } - -/* As opposed to .PopupSearchResultsPage #Index */ -.IndexPage #Index, -.FramedIndexPage #Index, -.FramedSearchResultsPage #Index { - padding: 15px; - } - -.IndexPage #Index { - border-width: 0 0 1px 1px; - border-style: solid; - border-color: #000000; - font-size: 9pt; /* To make 27ex match the menu's 27ex. 
*/ - margin-left: 27ex; - } - - - .IPageTitle { - font-size: 20pt; font-weight: bold; - color: #FFFFFF; background-color: #7070C0; - padding: 10px 15px 10px 15px; - border-width: 0 0 3px 0; border-color: #000000; border-style: solid; - margin: -15px -15px 0 -15px } - - .FramedSearchResultsPage .IPageTitle { - margin-bottom: 15px; - } - - .INavigationBar { - font-size: 10pt; - text-align: center; - background-color: #FFFFF0; - padding: 5px; - border-bottom: solid 1px black; - margin: 0 -15px 15px -15px; - } - - .INavigationBar a { - font-weight: bold } - - .IHeading { - font-size: 16pt; font-weight: bold; - padding: 2.5em 0 .5em 0; - text-align: center; - width: 3.5ex; - } - #IFirstHeading { - padding-top: 0; - } - - .IEntry { - font-size: 10pt; - padding-left: 1ex; - } - .PopupSearchResultsPage .IEntry { - font-size: 8pt; - padding: 1px 5px; - } - .PopupSearchResultsPage .Opera9 .IEntry, - .FramedSearchResultsPage .Opera9 .IEntry { - text-align: left; - } - .FramedSearchResultsPage .IEntry { - padding: 0; - } - - .ISubIndex { - padding-left: 3ex; padding-bottom: .5em } - .PopupSearchResultsPage .ISubIndex { - display: none; - } - - /* While it may cause some entries to look like links when they aren't, I found it's much easier to read the - index if everything's the same color. 
*/ - .ISymbol { - font-weight: bold; color: #900000 } - - .IndexPage .ISymbolPrefix, - .FramedIndexPage .ISymbolPrefix { - font-size: 10pt; - text-align: right; - color: #C47C7C; - background-color: #F8F8F8; - border-right: 3px solid #E0E0E0; - border-left: 1px solid #E0E0E0; - padding: 0 1px 0 2px; - } - .PopupSearchResultsPage .ISymbolPrefix, - .FramedSearchResultsPage .ISymbolPrefix { - color: #900000; - } - .PopupSearchResultsPage .ISymbolPrefix { - font-size: 8pt; - } - - .IndexPage #IFirstSymbolPrefix, - .FramedIndexPage #IFirstSymbolPrefix { - border-top: 1px solid #E0E0E0; - } - .IndexPage #ILastSymbolPrefix, - .FramedIndexPage #ILastSymbolPrefix { - border-bottom: 1px solid #E0E0E0; - } - .IndexPage #IOnlySymbolPrefix, - .FramedIndexPage #IOnlySymbolPrefix { - border-top: 1px solid #E0E0E0; - border-bottom: 1px solid #E0E0E0; - } - - a.IParent, - a.IFile { - display: block; - } - - .PopupSearchResultsPage .SRStatus { - padding: 2px 5px; - font-size: 8pt; - font-style: italic; - } - .FramedSearchResultsPage .SRStatus { - font-size: 10pt; - font-style: italic; - } - - .SRResult { - display: none; - } - - - -#Footer { - font-size: 8pt; - color: #989898; - text-align: right; - } - -#Footer p { - text-indent: 0; - margin-bottom: .5em; - } - -.ContentPage #Footer, -.IndexPage #Footer { - text-align: right; - margin: 2px; - } - -.FramedMenuPage #Footer { - text-align: center; - margin: 5em 10px 10px 10px; - padding-top: 1em; - border-top: 1px solid #C8C8C8; - } - - #Footer a:link, - #Footer a:hover, - #Footer a:visited { color: #989898 } - #Footer a:active { color: #A00000 } - - - -.prettyprint .kwd { color: #800000; } /* keywords */ - - .prettyprint.PDefaultValue .kwd, - .prettyprint.PDefaultValuePrefix .kwd, - .prettyprint.PTypePrefix .kwd { - color: #C88F8F; - } - -.prettyprint .com { color: #008000; } /* comments */ - - .prettyprint.PDefaultValue .com, - .prettyprint.PDefaultValuePrefix .com, - .prettyprint.PTypePrefix .com { - color: #8FC88F; - } - 
-.prettyprint .str { color: #0000B0; } /* strings */ -.prettyprint .lit { color: #0000B0; } /* literals */ - - .prettyprint.PDefaultValue .str, - .prettyprint.PDefaultValuePrefix .str, - .prettyprint.PTypePrefix .str, - .prettyprint.PDefaultValue .lit, - .prettyprint.PDefaultValuePrefix .lit, - .prettyprint.PTypePrefix .lit { - color: #8F8FC0; - } - -.prettyprint .typ { color: #000000; } /* types */ -.prettyprint .pun { color: #000000; } /* punctuation */ -.prettyprint .pln { color: #000000; } /* punctuation */ - - .prettyprint.PDefaultValue .typ, - .prettyprint.PDefaultValuePrefix .typ, - .prettyprint.PTypePrefix .typ, - .prettyprint.PDefaultValue .pun, - .prettyprint.PDefaultValuePrefix .pun, - .prettyprint.PTypePrefix .pun, - .prettyprint.PDefaultValue .pln, - .prettyprint.PDefaultValuePrefix .pln, - .prettyprint.PTypePrefix .pln { - color: #8F8F8F; - } - -.prettyprint .tag { color: #008; } -.prettyprint .atn { color: #606; } -.prettyprint .atv { color: #080; } -.prettyprint .dec { color: #606; } - diff --git a/example/add-and-commit.js b/example/add-and-commit.js deleted file mode 100644 index d9c5da2b7..000000000 --- a/example/add-and-commit.js +++ /dev/null @@ -1,59 +0,0 @@ -var git = require('../'), - path = require('path'), - fs = require('fs'), - fileName = 'newfile.txt', - fileContent = 'hello world' - ; - -/** - * This example creates a certain file `newfile.txt`, adds it to the git index and - * commits it to head. Similar to a `git add newfile.txt` followed by a `git commit` -**/ - -//open a git repo -git.Repo.open(path.resolve(__dirname, '../.git'), function(openReporError, repo) { - if (openReporError) throw openReporError; - - //create the file in the repo's workdir - fs.writeFile(path.join(repo.workdir(), fileName), fileContent, function(writeError) { - if (writeError) throw writeError; - - //add the file to the index... 
- repo.openIndex(function(openIndexError, index) { - if (openIndexError) throw openIndexError; - - index.read(function(readError) { - if (readError) throw readError; - - index.addByPath(fileName, function(addByPathError) { - if (addByPathError) throw addByPathError; - - index.write(function(writeError) { - if (writeError) throw writeError; - - index.writeTree(function(writeTreeError, oid) { - if (writeTreeError) throw writeTreeError; - - //get HEAD - git.Reference.oidForName(repo, 'HEAD', function(oidForName, head) { - if (oidForName) throw oidForName; - - //get latest commit (will be the parent commit) - repo.getCommit(head, function(getCommitError, parent) { - if (getCommitError) throw getCommitError; - var author = git.Signature.create("Scott Chacon", "schacon@gmail.com", 123456789, 60); - var committer = git.Signature.create("Scott A Chacon", "scott@github.com", 987654321, 90); - - //commit - repo.createCommit('HEAD', author, committer, 'message', oid, [parent], function(error, commitId) { - console.log("New Commit:", commitId.sha()); - }); - }); - }); - }); - }); - }); - }); - }); - }); -}); \ No newline at end of file diff --git a/example/apps/git_profanity_check.js b/example/apps/git_profanity_check.js deleted file mode 100644 index 7f0674849..000000000 --- a/example/apps/git_profanity_check.js +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env node -// vim: ft=javascript - -// Copyright 2011-2014, Tim Branyen @tbranyen -// Dual licensed under the MIT and GPL licenses. -// Script to detect cursewords in commit messages and provide the offending -// commit sha's. -// -// Usage: -// -// node git_profanity_check some/repo/.git -// -var git = require('../../'); - -var curses = ['put', 'curse', 'words', 'here']; -var path = './.git'; -var branch = 'master'; -var reCurse = new RegExp('\\b(?:' + curses.join('|') + ')\\b', 'gi'); - -// Default path is `.git`. 
-if (process.argv.length < 3) { - console.log('No path passed as argument, defaulting to .git.'); -} -// Otherwise defaults. -else { - path = process.argv[2]; - - // Set repo branch - if (process.argv.length < 4) { - console.log('No branch passed as argument, defaulting to master.'); - } - else { - branch = process.argv[3]; - } -} - -// Open repository. -git.Repo.open(path, function(err, repo) { - if (err) { - throw new Error(err); - } - - // Open branch, default to master. - repo.getBranch(branch, function(err, branch) { - if (err) { - throw new Error(err); - } - - // Iterate history - var history = branch.history(); - - // Iterate over every commit message and test for words. - history.on('commit', function(commit) { - var message = commit.message(); - - if (reCurse.test(message)) { - console.log('Curse detected in commit', commit.sha()); - console.log('=> ', message); - return; - } - }); - - // Start history iteration. - history.start(); - }); -}); diff --git a/example/clone.js b/example/clone.js deleted file mode 100644 index 75f8b3603..000000000 --- a/example/clone.js +++ /dev/null @@ -1,27 +0,0 @@ -var git = require('../'), - rimraf = require('rimraf'), - path = "/tmp/nodegit-clone-demo"; - -rimraf(path, function() { - git.Repo.clone("https://github.com/nodegit/nodegit.git", path, null, function(error, repo) { - if (error) throw error; - - repo.getCommit('59b20b8d5c6ff8d09518454d4dd8b7b30f095ab5', function(error, commit) { - if (error) throw error; - - commit.getEntry('README.md', function(error, entry) { - if (error) throw error; - - entry.getBlob(function(error, blob) { - if (error) throw error; - - console.log(entry.name(), entry.sha(), blob.size() + 'b'); - console.log('========================================================\n\n'); - var firstTenLines = blob.toString().split('\n').slice(0, 10).join('\n'); - console.log(firstTenLines); - console.log('...'); - }); - }); - }); - }); -}); \ No newline at end of file diff --git a/example/diff-commits.js 
b/example/diff-commits.js deleted file mode 100644 index edd05b3eb..000000000 --- a/example/diff-commits.js +++ /dev/null @@ -1,35 +0,0 @@ -var git = require('../'), - path = require('path'); - -// This code examines the diffs between a particular commit and all of its -// parents. Since this commit is not a merge, it only has one parent. This is -// similar to doing `git show`. - -git.Repo.open(path.resolve(__dirname, '../.git'), function(error, repo) { - if (error) throw error; - - repo.getCommit('59b20b8d5c6ff8d09518454d4dd8b7b30f095ab5', function(error, commit) { - if (error) throw error; - - console.log('commit ' + commit.sha()); - console.log('Author:', commit.author().name() + ' <' + commit.author().email() + '>'); - console.log('Date:', commit.date()); - console.log('\n ' + commit.message()); - - commit.getDiff(function(error, diffList) { - if (error) throw error; - - diffList.forEach(function(diff) { - diff.patches().forEach(function(patch) { - console.log("diff", patch.oldFile().path(), patch.newFile().path()); - patch.hunks().forEach(function(hunk) { - console.log(hunk.header().trim()); - hunk.lines().forEach(function(line) { - console.log(String.fromCharCode(line.lineOrigin) + line.content.trim()); - }); - }); - }); - }); - }); - }); -}); diff --git a/example/fetch.js b/example/fetch.js deleted file mode 100644 index 2eeffe327..000000000 --- a/example/fetch.js +++ /dev/null @@ -1,17 +0,0 @@ -var git = require('../'), - path = require('path'); - -git.Repo.open(path.resolve(__dirname, '../.git'), function(error, repo) { - if (error) throw error; - - var remote = repo.getRemote("origin"); - remote.connect(0, function(error) { - if (error) throw error; - - remote.download(null, function(error) { - if (error) throw error; - - console.log("It worked!"); - }) - }); -}); diff --git a/example/general.js b/example/general.js deleted file mode 100644 index 559a42861..000000000 --- a/example/general.js +++ /dev/null @@ -1,329 +0,0 @@ -var git = require('../'), - 
path = require('path'); - -// **nodegit** is a javascript library for node.js that wraps libgit2, a -// pure C implementation of the Git core. It provides an asynchronous -// interface around any functions that do I/O, and a sychronous interface -// around the rest. -// -// This file is an example of using that API in a real, JS file. -// -// **libgit2** (for the most part) only implements the core plumbing -// functions, not really the higher level porcelain stuff. For a primer on -// Git Internals that you will need to know to work with Git at this level, -// check out [Chapter 9][pg] of the Pro Git book. - -// Nearly, all git operations in the context of a repository. -// To open a repository, - -git.Repo.open(path.resolve(__dirname, '../.git'), function(error, repo) { - // For all of the following examples, error-handling will be performed in - // this naive way: - if (error) throw error; - console.log("Opened repository."); - - // ### SHA-1 Value Conversions - - // Objects in git (commits, blobs, etc.) are referred to by their SHA value - // **nodegit** uses a simple wrapper around hash values called an `Oid`. - // The oid validates that the SHA is well-formed. - - var oid = git.Oid.fromString('c27d9c35e3715539d941254f2ce57042b978c49c'); - - // Most functions in in **nodegit** that take an oid will also take a - // string, so for example, you can look up a commit by a string SHA or - // an Oid, but but any functions that create new SHAs will always return - // an Oid. - - // If you have a oid, you can easily get the hex value of the SHA again. - console.log("Sha hex string:", oid.sha()); - - // ### Working with the Object Database - - // **libgit2** provides [direct access][odb] to the object database. The - // object database is where the actual objects are stored in Git. For - // working with raw objects, we'll need to get this structure from the - // repository. 
- var odb = repo.odb(); - - // We can read raw objects directly from the object database if we have - // the oid (SHA) of the object. This allows us to access objects without - // knowing thier type and inspect the raw bytes unparsed. - - odb.read(oid, function(error, object) { - if (error) throw error; - - // A raw object only has three properties - the type (commit, blob, tree - // or tag), the size of the raw data and the raw, unparsed data itself. - // For a commit or tag, that raw data is human readable plain ASCII - // text. For a blob it is just file contents, so it could be text or - // binary data. For a tree it is a special binary format, so it's unlikely - // to be hugely helpful as a raw object. - var data = object.data(), - type = object.type(); - - console.log("Object size and type:", object.size(), object.type()); - }); - - // You can also write raw object data to Git. This is pretty cool because - // it gives you direct access to the key/value properties of Git. Here - // we'll write a new blob object that just contains a simple string. - // Notice that we have to specify the object type. - odb.write("test data", "test data".length, git.Object.Type.Blob, function(error, oid) { - if (error) throw error; - - // Now that we've written the object, we can check out what SHA1 was - // generated when the object was written to our database. - console.log("Written Object: ", oid.sha()); - }); - - // ### Object Parsing - - // libgit2 has methods to parse every object type in Git so you don't have - // to work directly with the raw data. This is much faster and simpler - // than trying to deal with the raw data yourself. - - // #### Commit Parsing - - // [Parsing commit objects][pco] is simple and gives you access to all the - // data in the commit - the author (name, email, datetime), committer - // (same), tree, message, encoding and parent(s). 
- - oid = git.Oid.fromString("698c74e817243efe441a5d1f3cbaf3998282ca86"); - - // Many methods in **nodegit** are asynchronous, because they do file - // or network I/O. By convention, all asynchronous methods are named - // imperatively, like `getCommit`, `open`, `read`, `write`, etc., whereas - // synchronous methods are named nominatively, like `type`, `size`, `name`. - - repo.getCommit(oid, function(error, commit) { - if (error) throw error; - - // Each of the properties of the commit object are accessible via methods, - // including commonly needed variations, such as `git_commit_time` which - // returns the author time and `git_commit_message` which gives you the - // commit message. - console.log("Commit:", commit.message(), commit.author().name(), commit.date()); - - // Commits can have zero or more parents. The first (root) commit will - // have no parents, most commits will have one (i.e. the commit it was - // based on) and merge commits will have two or more. Commits can - // technically have any number, though it's rare to have more than two. - commit.getParents(function(error, parents) { - parents.forEach(function(parent) { - console.log("Parent:", parent.oid().sha()); - }); - }); - }); - - // #### Writing Commits - - // nodegit provides a couple of methods to create commit objects easily as - // well. - - var author = git.Signature.create("Scott Chacon", "schacon@gmail.com", 123456789, 60); - var committer = git.Signature.create("Scott A Chacon", "scott@github.com", 987654321, 90); - - // Commit objects need a tree to point to and optionally one or more - // parents. 
Here we're creating oid objects to create the commit with, - // but you can also use existing ones: - - var treeId = git.Oid.fromString("4170d10f19600b9cb086504e8e05fe7d863358a2"); - var parentId = git.Oid.fromString("eebd0ead15d62eaf0ba276da53af43bbc3ce43ab"); - - repo.getTree(treeId, function(error, tree) { - if (error) throw error; - - repo.getCommit(parentId, function(error, parent) { - if (error) throw error; - // Here we actually create the commit object with a single call with all - // the values we need to create the commit. The SHA key is written to the - // `commit_id` variable here. - repo.createCommit( - null /* do not update the HEAD */, - author, - committer, - "example commit", - tree, - [parent], - function (error, oid) { - console.log("New Commit:", oid.sha()); - }); - }); - }); - - // #### Tag Parsing - - // You can parse and create tags with the [tag management API][tm], which - // functions very similarly to the commit lookup, parsing and creation - // methods, since the objects themselves are very similar. - - oid = git.Oid.fromString("43f0ac7359e30b769f6b1714e0adbaf51bedbb65"); - repo.getTag(oid, function(error, tag) { - if (error) throw error; - - // Now that we have the tag object, we can extract the information it - // generally contains: the target (usually a commit object), the type of - // the target object (usually 'commit'), the name ('v1.0'), the tagger (a - // git_signature - name, email, timestamp), and the tag message. - console.log(tag.name(), tag.targetType(), tag.message()); - - tag.getTarget(function (error, target) { - if (error) throw error; - - console.log("Target is commit:", target.isCommit()); - }); - }); - - - // #### Tree Parsing - - // A Tree is how Git represents the state of the filesystem - // at a given revision. In general, a tree corresponds to a directory, - // and files in that directory are either files (blobs) or directories. 
- - // [Tree parsing][tp] is a bit different than the other objects, in that - // we have a subtype which is the tree entry. This is not an actual - // object type in Git, but a useful structure for parsing and traversing - // tree entries. - - oid = git.Oid.fromString("e1b0c7ea57bfc5e30ec279402a98168a27838ac9"); - repo.getTree(oid, function(error, tree) { - if (error) throw error; - - console.log("Tree Size:", tree.size()); - function dfs(error, tree) { - tree.entries().forEach(function(entry) { - if (entry.isDirectory()) { - entry.getTree(dfs); - } else if (entry.isFile()) { - console.log("Tree Entry:", entry.name()); - } - }); - } - dfs(null, tree); - - // You can also access tree entries by path if you know the path of the - // entry you're looking for. - tree.getEntry("example/general.js", function(error, entry) { - if (error) throw error; - - // Entries which are files have blobs associated with them: - entry.getBlob(function(error, blob) { - console.log("Blob size:", blob.size()); - }); - }); - }); - - // #### Blob Parsing - - // The last object type is the simplest and requires the least parsing - // help. Blobs are just file contents and can contain anything, there is - // no structure to it. The main advantage to using the [simple blob - // api][ba] is that when you're creating blobs you don't have to calculate - // the size of the content. There is also a helper for reading a file - // from disk and writing it to the db and getting the oid back so you - // don't have to do all those steps yourself. - - oid = git.Oid.fromString("991c06b7b1ec6f939488427e4b41a4fa3e1edd5f"); - repo.getBlob(oid, function(error, blob) { - if (error) throw error; - - // You can access a node.js Buffer with the raw contents of the blob directly. - // Note that this buffer may not be contain ASCII data for certain blobs - // (e.g. binary files). 
- var buffer = blob.content(); - - // If you know that the blob is UTF-8, however, - console.log("Blob contents:", blob.toString().slice(0, 38)); - }); - - // ### Revwalking - - // The libgit2 [revision walking api][rw] provides methods to traverse the - // directed graph created by the parent pointers of the commit objects. - // Since all commits point back to the commit that came directly before - // them, you can walk this parentage as a graph and find all the commits - // that were ancestors of (reachable from) a given starting point. This - // can allow you to create `git log` type functionality. - - oid = git.Oid.fromString("698c74e817243efe441a5d1f3cbaf3998282ca86"); - - // To use the revwalker, create a new walker, tell it how you want to sort - // the output and then push one or more starting points onto the walker. - // If you want to emulate the output of `git log` you would push the SHA - // of the commit that HEAD points to into the walker and then start - // traversing them. You can also 'hide' commits that you want to stop at - // or not see any of their ancestors. So if you want to emulate `git log - // branch1..branch2`, you would push the oid of `branch2` and hide the oid - // of `branch1`. - var revWalk = repo.createRevWalk(); - revWalk.sorting(git.RevWalk.Sort.Topological, git.RevWalk.Sort.Reverse); - revWalk.push(oid, function(error) { - if (error) throw error; - - // Now that we have the starting point pushed onto the walker, we start - // asking for ancestors. It will return them in the sorting order we asked - // for as commit oids. 
We can then lookup and parse the commited pointed - // at by the returned OID; note that this operation is specially fast - // since the raw contents of the commit object will be cached in memory - - function walk() { - revWalk.next(function(error, oid) { - if (error) throw error; - if (!oid) return; - - repo.getCommit(oid, function(error, commit) { - if (error) throw error; - - console.log("Commit:", commit.sha()); - walk(); - }); - }); - } - walk(); - }); - - // ### Index File Manipulation - - // The [index file API][gi] allows you to read, traverse, update and write - // the Git index file (sometimes thought of as the staging area). - repo.openIndex(function(error, index) { - if (error) throw error; - - // For each entry in the index, you can get a bunch of information - // including the SHA (oid), path and mode which map to the tree objects - // that are written out. It also has filesystem properties to help - // determine what to inspect for changes (ctime, mtime, dev, ino, uid, - // gid, file_size and flags) All these properties are exported publicly in - // the `IndexEntry` class - - index.entries().forEach(function(entry) { - console.log("Index Entry:", entry.path(), entry.mtime().seconds()); - }); - }); - - // ### References - - // The [reference API][ref] allows you to list, resolve, create and update - // references such as branches, tags and remote references (everything in - // the .git/refs directory). 
- - repo.getReferences(git.Reference.Type.All, function(error, referenceNames) { - if (error) throw error; - - referenceNames.forEach(function(referenceName) { - repo.getReference(referenceName, function(error, reference) { - if (error) throw error; - - if (reference.isConcrete()) { - console.log("Reference:", referenceName, reference.target()); - } else if (reference.isSymbolic()) { - console.log("Reference:", referenceName, reference.symbolicTarget()); - } - }); - }); - }); -}); - - diff --git a/example/new-commit.js b/example/new-commit.js deleted file mode 100755 index 4bf733c53..000000000 --- a/example/new-commit.js +++ /dev/null @@ -1,32 +0,0 @@ -var git = require('../'), - path = require('path'); - -// This example opens a certain file, `README.md`, at a particular commit, -// and prints the first 10 lines as well as some metadata. - -git.Repo.open(path.resolve(__dirname, '../.git'), function(error, repo) { - if (error) throw error; - - repo.getCommit('eebd0ead15d62eaf0ba276da53af43bbc3ce43ab', function(error, commit) { - if (error) throw error; - - commit.getTree(function(error, tree) { - if (error) throw error; - - var builder = tree.builder(), - buffer = new Buffer("this is a file\n"); - - builder.insertBlob("/lib/baz.txt", buffer, false) - builder.write(function(error, treeId) { - if (error) throw error; - - var author = git.Signature.create("Scott Chacon", "schacon@gmail.com", 123456789, 60); - var committer = git.Signature.create("Scott A Chacon", "scott@github.com", 987654321, 90); - - repo.createCommit(null, author, committer, "message", treeId, [commit], function(error, commitId) { - console.log("New Commit:", commitId.sha()); - }); - }); - }); - }); -}); diff --git a/example/read-file.js b/example/read-file.js deleted file mode 100644 index fc698b03d..000000000 --- a/example/read-file.js +++ /dev/null @@ -1,27 +0,0 @@ -var git = require('../'), - path = require('path'); - -// This example opens a certain file, `README.md`, at a particular commit, 
-// and prints the first 10 lines as well as some metadata. - -git.Repo.open(path.resolve(__dirname, '../.git'), function(error, repo) { - if (error) throw error; - - repo.getCommit('59b20b8d5c6ff8d09518454d4dd8b7b30f095ab5', function(error, commit) { - if (error) throw error; - - commit.getEntry('README.md', function(error, entry) { - if (error) throw error; - - entry.getBlob(function(error, blob) { - if (error) throw error; - - console.log(entry.name(), entry.sha(), blob.size() + 'b'); - console.log('========================================================\n\n'); - var firstTenLines = blob.toString().split('\n').slice(0, 10).join('\n'); - console.log(firstTenLines); - console.log('...'); - }); - }); - }); -}); diff --git a/example/remove-and-commit.js b/example/remove-and-commit.js deleted file mode 100644 index 5a4c17684..000000000 --- a/example/remove-and-commit.js +++ /dev/null @@ -1,53 +0,0 @@ -var git = require('../'), - path = require('path'), - fileName = 'newfile.txt' - ; - -/** - * This example deletes a certain file `newfile.txt`, removes it from the git index and - * commits it to head. Similar to a `git rm newfile.txt` followed by a `git commit` - * Use add-and-commit.js to create the file first. -**/ - -//open a git repo -git.Repo.open(path.resolve(__dirname, '../.git'), function(openReporError, repo) { - if (openReporError) throw openReporError; - - //remove the file from the index... 
- repo.openIndex(function(openIndexError, index) { - if (openIndexError) throw openIndexError; - - index.read(function(readError) { - if (readError) throw readError; - - index.removeByPath(fileName); - - index.write(function(writeError) { - if (writeError) throw writeError; - - index.writeTree(function(writeTreeError, oid) { - if (writeTreeError) throw writeTreeError; - - //get HEAD - git.Reference.oidForName(repo, 'HEAD', function(oidForName, head) { - if (oidForName) throw oidForName; - - //get latest commit (will be the parent commit) - repo.getCommit(head, function(getCommitError, parent) { - if (getCommitError) throw getCommitError; - var author = git.Signature.create("Scott Chacon", "schacon@gmail.com", 123456789, 60); - var committer = git.Signature.create("Scott A Chacon", "scott@github.com", 987654321, 90); - - //commit - repo.createCommit('HEAD', author, committer, 'message', oid, [parent], function(error, commitId) { - console.log("New Commit:", commitId.sha()); - // the file is removed from the git repo, use fs.unlink now to remove it - // from the filesystem. - }); - }); - }); - }); - }); - }); - }); -}); \ No newline at end of file diff --git a/example/walk-history.js b/example/walk-history.js deleted file mode 100644 index 4a31f806b..000000000 --- a/example/walk-history.js +++ /dev/null @@ -1,28 +0,0 @@ -var git = require('../'), - path = require('path'), - sort = git.RevWalk.Sort; - -// This code walks the history of the master branch and prints results -// that look very similar to calling `git log` from the command line - -git.Repo.open(path.resolve(__dirname, '../.git'), function(error, repo) { - if (error) throw error; - - repo.getMaster(function(error, branch) { - if (error) throw error; - - // History returns an event. 
- var history = branch.history(sort.Time); - - // History emits 'commit' event for each commit in the branch's history - history.on('commit', function(commit) { - console.log('commit ' + commit.sha()); - console.log('Author:', commit.author().name() + ' <' + commit.author().email() + '>'); - console.log('Date:', commit.date()); - console.log('\n ' + commit.message()); - }); - - // Don't forget to call `start()`! - history.start(); - }); -}); diff --git a/example/walk-tree.js b/example/walk-tree.js deleted file mode 100644 index c5c5f98a8..000000000 --- a/example/walk-tree.js +++ /dev/null @@ -1,27 +0,0 @@ -var git = require('../'), - path = require('path'); - -// A `tree` in git is typically a representation of the filesystem at -// a revision. A tree has a set of entries, each entry being either a -// tree (directory), or a file. - -git.Repo.open(path.resolve(__dirname, '../.git'), function(error, repo) { - if (error) throw error; - - repo.getMaster(function(error, branch) { - if (error) throw error; - - branch.getTree(function(error, tree) { - if (error) throw error; - - // `walk()` returns an event. - var walker = tree.walk(); - walker.on('entry', function(entry) { - console.log(entry.path()); - }); - - // Don't forget to call `start()`! - walker.start(); - }); - }); -}); diff --git a/examples/add-and-commit.js b/examples/add-and-commit.js new file mode 100644 index 000000000..d9af04ed7 --- /dev/null +++ b/examples/add-and-commit.js @@ -0,0 +1,75 @@ +var nodegit = require("../"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var fileName = "newfile.txt"; +var fileContent = "hello world"; +var directoryName = "salad/toast/strangerinastrangeland/theresnowaythisexists"; +// ensureDir is an alias to mkdirp, which has the callback with a weird name +// and in the 3rd position of 4 (the 4th being used for recursion). 
We have to +// force promisify it, because promisify-node won't detect it on its +// own and assumes sync +fse.ensureDir = promisify(fse.ensureDir); + +/** + * This example creates a certain file `newfile.txt`, adds it to the git + * index and commits it to head. Similar to a `git add newfile.txt` + * followed by a `git commit` +**/ + +var repo; +var index; +var oid; + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) +.then(function(repoResult) { + repo = repoResult; + return fse.ensureDir(path.join(repo.workdir(), directoryName)); +}).then(function(){ + return fse.writeFile(path.join(repo.workdir(), fileName), fileContent); +}) +.then(function() { + return fse.writeFile( + path.join(repo.workdir(), directoryName, fileName), + fileContent + ); +}) +.then(function() { + return repo.refreshIndex(); +}) +.then(function(indexResult) { + index = indexResult; +}) +.then(function() { + // this file is in the root of the directory and doesn't need a full path + return index.addByPath(fileName); +}) +.then(function() { + // this file is in a subdirectory and can use a relative path + return index.addByPath(path.join(directoryName, fileName)); +}) +.then(function() { + // this will write both files to the index + return index.write(); +}) +.then(function() { + return index.writeTree(); +}) +.then(function(oidResult) { + oid = oidResult; + return nodegit.Reference.nameToId(repo, "HEAD"); +}) +.then(function(head) { + return repo.getCommit(head); +}) +.then(function(parent) { + var author = nodegit.Signature.create("Scott Chacon", + "schacon@gmail.com", 123456789, 60); + var committer = nodegit.Signature.create("Scott A Chacon", + "scott@github.com", 987654321, 90); + + return repo.createCommit("HEAD", author, committer, "message", oid, [parent]); +}) +.done(function(commitId) { + console.log("New Commit: ", commitId); +}); diff --git a/examples/apps/git_profanity_check.js b/examples/apps/git_profanity_check.js new file mode 100644 index 000000000..c4a839636 --- 
/dev/null +++ b/examples/apps/git_profanity_check.js @@ -0,0 +1,59 @@ +#!/usr/bin/env node +// vim: ft=javascript + +// Copyright 2011-2014, Tim Branyen @tbranyen +// Dual licensed under the MIT and GPL licenses. +// Script to detect cursewords in commit messages and provide the offending +// commit sha's. +// +// Usage: +// +// node git_profanity_check some/repo/.git +// +var git = require("../../"); + +var curses = ["put", "curse", "words", "here"]; +var path = "./.git"; +var branch = "master"; +var reCurse = new RegExp("\\b(?:" + curses.join("|") + ")\\b", "gi"); + +// Default path is `.git`. +if (process.argv.length < 3) { + console.log("No path passed as argument, defaulting to .git."); +} +// Otherwise defaults. +else { + path = process.argv[2]; + + // Set repo branch + if (process.argv.length < 4) { + console.log("No branch passed as argument, defaulting to master."); + } + else { + branch = process.argv[3]; + } +} + +// Open repository. +git.Repo.open(path) +.then(function(repo) { + // Open branch, default to master. + return repo.getBranchCommit(branch); +}).then(function(firstCommit) { + // Iterate history + var history = firstCommit.history(); + + // Iterate over every commit message and test for words. + history.on("commit", function(commit) { + var message = commit.message(); + + if (reCurse.test(message)) { + console.log("Curse detected in commit", commit.sha()); + console.log("=> ", message); + return; + } + }); + + // Start history iteration. 
+ history.start(); +}); diff --git a/examples/clone.js b/examples/clone.js new file mode 100644 index 000000000..a23060464 --- /dev/null +++ b/examples/clone.js @@ -0,0 +1,40 @@ +var nodegit = require("../"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var path = "/tmp/nodegit-clone-demo"; + +fse.remove(path).then(function() { + var entry; + + nodegit.Clone( + "https://github.com/nodegit/nodegit.git", + path, + { + fetchOpts: { + callbacks: { + certificateCheck: function() { + // github will fail cert check on some OSX machines + // this overrides that check + return 1; + } + } + } + }) + .then(function(repo) { + return repo.getCommit("59b20b8d5c6ff8d09518454d4dd8b7b30f095ab5"); + }) + .then(function(commit) { + return commit.getEntry("README.md"); + }) + .then(function(entryResult) { + entry = entryResult; + return entry.getBlob(); + }) + .done(function(blob) { + console.log(entry.name(), entry.sha(), blob.rawsize() + "b"); + console.log("========================================================\n\n"); + var firstTenLines = blob.toString().split("\n").slice(0, 10).join("\n"); + console.log(firstTenLines); + console.log("..."); + }); +}); diff --git a/examples/cloneFromGithubWith2Factor.js b/examples/cloneFromGithubWith2Factor.js new file mode 100644 index 000000000..1dbf0a2c0 --- /dev/null +++ b/examples/cloneFromGithubWith2Factor.js @@ -0,0 +1,40 @@ +var nodegit = require("../"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var path = "/tmp/nodegit-github-2factor-demo"; + +var token = "{Your GitHub user token}"; +var repoOwner = "{The orgname or username that owns the repo}"; +var repoName = "{The name of the repo}"; + +// To clone with 2 factor auth enabled, you have to use a github oauth token +// over https, it can't be done with actual 2 factor. 
+// https://github.com/blog/1270-easier-builds-and-deployments-using-git-over-https-and-oauth + +// If the repo is public, you can use a callback instead +var repoUrl = "https://github.com/" + repoOwner + "/" + repoName + ".git"; + +var opts = { + fetchOpts: { + callbacks: { + credentials: function() { + return nodegit.Cred.userpassPlaintextNew(token, "x-oauth-basic"); + }, + certificateCheck: function() { + return 1; + } + } + } +}; + +fse.remove(path).then(function() { + nodegit.Clone(repoUrl, path, opts) + .done(function(repo) { + if (repo instanceof nodegit.Repository) { + console.info("We cloned the repo!"); + } + else { + console.error("Something borked :("); + } + }); +}); diff --git a/examples/create-branch.js b/examples/create-branch.js new file mode 100644 index 000000000..28ab6bea5 --- /dev/null +++ b/examples/create-branch.js @@ -0,0 +1,18 @@ +var nodegit = require("../"); +var path = require("path"); + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(repo) { + // Create a new branch on head + return repo.getHeadCommit() + .then(function(commit) { + return repo.createBranch( + "new-branch", + commit, + 0, + repo.defaultSignature(), + "Created new-branch on HEAD"); + }); + }).done(function() { + console.log("All done!"); + }); diff --git a/examples/create-new-repo.js b/examples/create-new-repo.js new file mode 100644 index 000000000..6cabe9a75 --- /dev/null +++ b/examples/create-new-repo.js @@ -0,0 +1,49 @@ +var nodegit = require("../"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var fileName = "newfile.txt"; +var fileContent = "hello world"; +var repoDir = "../../newRepo"; + +fse.ensureDir = promisify(fse.ensureDir); + +var repository; +var index; + +fse.ensureDir(path.resolve(__dirname, repoDir)) +.then(function() { + return nodegit.Repository.init(path.resolve(__dirname, repoDir), 0); +}) +.then(function(repo) { + repository = repo; + return 
fse.writeFile(path.join(repository.workdir(), fileName), fileContent); +}) +.then(function(){ + return repository.refreshIndex(); +}) +.then(function(idx) { + index = idx; +}) +.then(function() { + return index.addByPath(fileName); +}) +.then(function() { + return index.write(); +}) +.then(function() { + return index.writeTree(); +}) +.then(function(oid) { + var author = nodegit.Signature.create("Scott Chacon", + "schacon@gmail.com", 123456789, 60); + var committer = nodegit.Signature.create("Scott A Chacon", + "scott@github.com", 987654321, 90); + + // Since we're creating an inital commit, it has no parents. Note that unlike + // normal we don't get the head either, because there isn't one yet. + return repository.createCommit("HEAD", author, committer, "message", oid, []); +}) +.done(function(commitId) { + console.log("New Commit: ", commitId); +}); diff --git a/examples/details-for-tree-entry.js b/examples/details-for-tree-entry.js new file mode 100644 index 000000000..6c4a0dc28 --- /dev/null +++ b/examples/details-for-tree-entry.js @@ -0,0 +1,29 @@ +var nodegit = require("../"); +var path = require("path"); + +/** + * This shows how to get details from a tree entry or a blob +**/ + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(repo) { + return repo.getTree("e1b0c7ea57bfc5e30ec279402a98168a27838ac9") + .then(function(tree) { + var treeEntry = tree.entryByIndex(0); + + // Tree entry doesn't have any data associated with the actual entry + // To get that we need to get the index entry that this points to + return repo.refreshIndex().then(function(index) { + var indexEntry = index.getByPath(treeEntry.path()); + + // With the index entry we can now view the details for the tree entry + console.log("Entry path: " + indexEntry.path()); + console.log("Entry time in seconds: " + indexEntry.mtime().seconds()); + console.log("Entry oid: " + indexEntry.id().toString()); + console.log("Entry size: " + indexEntry.fileSize()); + }); + }); + }) 
+ .done(function() { + console.log("Done!"); + }); diff --git a/examples/diff-commits.js b/examples/diff-commits.js new file mode 100644 index 000000000..f68d0fcfb --- /dev/null +++ b/examples/diff-commits.js @@ -0,0 +1,41 @@ +var nodegit = require("../"); +var path = require("path"); + +// This code examines the diffs between a particular commit and all of its +// parents. Since this commit is not a merge, it only has one parent. This is +// similar to doing `git show`. + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) +.then(function(repo) { + return repo.getCommit("59b20b8d5c6ff8d09518454d4dd8b7b30f095ab5"); +}) +.then(function(commit) { + console.log("commit " + commit.sha()); + console.log("Author:", commit.author().name() + + " <" + commit.author().email() + ">"); + console.log("Date:", commit.date()); + console.log("\n " + commit.message()); + + return commit.getDiff(); +}) +.done(function(diffList) { + diffList.forEach(function(diff) { + diff.patches().then(function(patches) { + patches.forEach(function(patch) { + patch.hunks().then(function(hunks) { + hunks.forEach(function(hunk) { + hunk.lines().then(function(lines) { + console.log("diff", patch.oldFile().path(), + patch.newFile().path()); + console.log(hunk.header().trim()); + lines.forEach(function(line) { + console.log(String.fromCharCode(line.origin()) + + line.content().trim()); + }); + }); + }); + }); + }); + }); + }); +}); diff --git a/examples/fetch.js b/examples/fetch.js new file mode 100644 index 000000000..ffde30ee1 --- /dev/null +++ b/examples/fetch.js @@ -0,0 +1,15 @@ +var nodegit = require("../"); +var path = require("path"); + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(repo) { + return repo.fetch("origin", { + callbacks: { + credentials: function(url, userName) { + return nodegit.Cred.sshKeyFromAgent(userName); + } + } + }); + }).done(function() { + console.log("It worked!"); + }); diff --git a/examples/general.js b/examples/general.js new 
file mode 100644 index 000000000..ce432cb50 --- /dev/null +++ b/examples/general.js @@ -0,0 +1,366 @@ +var nodegit = require("../"); +var path = require("path"); +var oid; +var odb; +var repo; + +// **nodegit** is a javascript library for node.js that wraps libgit2, a +// pure C implementation of the Git core. It provides an asynchronous +// interface around any functions that do I/O, and a sychronous interface +// around the rest. +// +// This file is an example of using that API in a real, JS file. +// +// **libgit2** (for the most part) only implements the core plumbing +// functions, not really the higher level porcelain stuff. For a primer on +// Git Internals that you will need to know to work with Git at this level, +// check out [Chapter 9][pg] of the Pro Git book. + +// Nearly, all git operations in the context of a repository. +// To open a repository, + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(repoResult) { + repo = repoResult; + console.log("Opened repository."); + + // ### SHA-1 Value Conversions + + // Objects in git (commits, blobs, etc.) are referred to by their SHA value + // **nodegit** uses a simple wrapper around hash values called an `Oid`. + // The oid validates that the SHA is well-formed. + + oid = nodegit.Oid.fromString("c27d9c35e3715539d941254f2ce57042b978c49c"); + + // Most functions in in **nodegit** that take an oid will also take a + // string, so for example, you can look up a commit by a string SHA or + // an Oid, but but any functions that create new SHAs will always return + // an Oid. + + // If you have a oid, you can easily get the hex value of the SHA again. + console.log("Sha hex string:", oid.toString()); + + // ### Working with the Object Database + + // **libgit2** provides [direct access][odb] to the object database. The + // object database is where the actual objects are stored in Git. For + // working with raw objects, we'll need to get this structure from the + // repository. 
+ return repo.odb(); + }) + + .then(function(odbResult) { + odb = odbResult; + + // We can read raw objects directly from the object database if we have + // the oid (SHA) of the object. This allows us to access objects without + // knowing thier type and inspect the raw bytes unparsed. + + return odb.read(oid); + }) + + .then(function(object) { + // A raw object only has three properties - the type (commit, blob, tree + // or tag), the size of the raw data and the raw, unparsed data itself. + // For a commit or tag, that raw data is human readable plain ASCII + // text. For a blob it is just file contents, so it could be text or + // binary data. For a tree it is a special binary format, so it's unlikely + // to be hugely helpful as a raw object. + var data = object.data(); + var type = object.type(); + var size = object.size(); + + console.log("Object size and type:", size, type); + console.log("Raw data: ", data.toString().substring(100), "..."); + + }) + + .then(function() { + // You can also write raw object data to Git. This is pretty cool because + // it gives you direct access to the key/value properties of Git. Here + // we'll write a new blob object that just contains a simple string. + // Notice that we have to specify the object type. + return odb.write("test data", "test data".length, nodegit.Object.TYPE.BLOB); + }) + + .then(function(oid) { + // Now that we've written the object, we can check out what SHA1 was + // generated when the object was written to our database. + console.log("Written Object: ", oid.toString()); + }) + + .then(function() { + // ### Object Parsing + + // libgit2 has methods to parse every object type in Git so you don't have + // to work directly with the raw data. This is much faster and simpler + // than trying to deal with the raw data yourself. 
+ + // #### Commit Parsing + + // [Parsing commit objects][pco] is simple and gives you access to all the + // data in the commit - the author (name, email, datetime), committer + // (same), tree, message, encoding and parent(s). + + oid = nodegit.Oid.fromString("698c74e817243efe441a5d1f3cbaf3998282ca86"); + + // Many methods in **nodegit** are asynchronous, because they do file + // or network I/O. By convention, all asynchronous methods are named + // imperatively, like `getCommit`, `open`, `read`, `write`, etc., whereas + // synchronous methods are named nominatively, like `type`, `size`, `name`. + + return repo.getCommit(oid); + }) + + .then(function(commit) { + // Each of the properties of the commit object are accessible via methods, + // including commonly needed variations, such as `git_commit_time` which + // returns the author time and `git_commit_message` which gives you the + // commit message. + console.log("Commit:", commit.message(), + commit.author().name(), commit.date()); + + // Commits can have zero or more parents. The first (root) commit will + // have no parents, most commits will have one (i.e. the commit it was + // based on) and merge commits will have two or more. Commits can + // technically have any number, though it's rare to have more than two. + return commit.getParents(); + }) + + .then(function(parents) { + parents.forEach(function(parent) { + console.log("Parent:", parent.toString()); + }); + }) + + .then(function() { + // #### Writing Commits + + // nodegit provides a couple of methods to create commit objects easily as + // well. + var author = nodegit.Signature.create("Scott Chacon", + "schacon@gmail.com", 123456789, 60); + var committer = nodegit.Signature.create("Scott A Chacon", + "scott@github.com", 987654321, 90); + + // Commit objects need a tree to point to and optionally one or more + // parents. 
Here we're creating oid objects to create the commit with, + // but you can also use existing ones: + var treeId = nodegit.Oid.fromString( + "4170d10f19600b9cb086504e8e05fe7d863358a2"); + var parentId = nodegit.Oid.fromString( + "eebd0ead15d62eaf0ba276da53af43bbc3ce43ab"); + + return repo.getTree(treeId).then(function(tree) { + return repo.getCommit(parentId).then(function(parent) { + // Here we actually create the commit object with a single call with all + // the values we need to create the commit. The SHA key is written to + // the `commit_id` variable here. + return repo.createCommit( + null /* do not update the HEAD */, + author, + committer, + "example commit", + tree, + [parent]); + }).then(function(oid) { + console.log("New Commit:", oid.toString()); + }); + }); + }) + + .then(function() { + // #### Tag Parsing + + // You can parse and create tags with the [tag management API][tm], which + // functions very similarly to the commit lookup, parsing and creation + // methods, since the objects themselves are very similar. + + oid = nodegit.Oid.fromString("dcc4aa9fcdaced037434cb149ed3b6eab4d0709d"); + return repo.getTag(oid); + }) + + .then(function(tag) { + // Now that we have the tag object, we can extract the information it + // generally contains: the target (usually a commit object), the type of + // the target object (usually "commit"), the name ("v1.0"), the tagger (a + // git_signature - name, email, timestamp), and the tag message. + console.log(tag.name(), tag.targetType(), tag.message()); + + return tag.target(); + }) + + .then(function (target) { + console.log("Target is commit:", target.isCommit()); + }) + + .then(function() { + // #### Tree Parsing + + // A Tree is how Git represents the state of the filesystem + // at a given revision. In general, a tree corresponds to a directory, + // and files in that directory are either files (blobs) or directories. 
+ + // [Tree parsing][tp] is a bit different than the other objects, in that + // we have a subtype which is the tree entry. This is not an actual + // object type in Git, but a useful structure for parsing and traversing + // tree entries. + + oid = nodegit.Oid.fromString("e1b0c7ea57bfc5e30ec279402a98168a27838ac9"); + return repo.getTree(oid); + }) + + .then(function(tree) { + console.log("Tree Size:", tree.entryCount()); + + function dfs(tree) { + var promises = []; + + tree.entries().forEach(function(entry) { + if (entry.isDirectory()) { + promises.push(entry.getTree().then(dfs)); + } else if (entry.isFile()) { + console.log("Tree Entry:", entry.name()); + } + }); + + return Promise.all(promises); + } + + return dfs(tree).then(function() { + // You can also access tree entries by path if you know the path of the + // entry you're looking for. + return tree.getEntry("example/general.js").then(function(entry) { + // Entries which are files have blobs associated with them: + entry.getBlob(function(error, blob) { + console.log("Blob size:", blob.size()); + }); + }); + }); + }) + + .then(function() { + // #### Blob Parsing + + // The last object type is the simplest and requires the least parsing + // help. Blobs are just file contents and can contain anything, there is + // no structure to it. The main advantage to using the [simple blob + // api][ba] is that when you're creating blobs you don't have to calculate + // the size of the content. There is also a helper for reading a file + // from disk and writing it to the db and getting the oid back so you + // don't have to do all those steps yourself. + + oid = nodegit.Oid.fromString("991c06b7b1ec6f939488427e4b41a4fa3e1edd5f"); + return repo.getBlob(oid); + }) + + .then(function(blob) { + // You can access a node.js Buffer with the raw contents + // of the blob directly. Note that this buffer may not + // contain ASCII data for certain blobs (e.g. binary files). 
+ var buffer = blob.content(); + + // If you know that the blob is UTF-8, however, + console.log("Blob contents:", blob.toString().slice(0, 38)); + console.log("Buffer:", buffer.toString().substring(100), "..."); + }) + + .then(function() { + // ### Revwalking + + // The libgit2 [revision walking api][rw] provides methods to traverse the + // directed graph created by the parent pointers of the commit objects. + // Since all commits point back to the commit that came directly before + // them, you can walk this parentage as a graph and find all the commits + // that were ancestors of (reachable from) a given starting point. This + // can allow you to create `git log` type functionality. + + oid = nodegit.Oid.fromString("698c74e817243efe441a5d1f3cbaf3998282ca86"); + + // To use the revwalker, create a new walker, tell it how you want to sort + // the output and then push one or more starting points onto the walker. + // If you want to emulate the output of `git log` you would push the SHA + // of the commit that HEAD points to into the walker and then start + // traversing them. You can also "hide" commits that you want to stop at + // or not see any of their ancestors. So if you want to emulate `git log + // branch1..branch2`, you would push the oid of `branch2` and hide the oid + // of `branch1`. + var revWalk = repo.createRevWalk(); + + revWalk.sorting(nodegit.Revwalk.SORT.TOPOLOGICAL, + nodegit.Revwalk.SORT.REVERSE); + + revWalk.push(oid); + + // Now that we have the starting point pushed onto the walker, we start + // asking for ancestors. It will return them in the sorting order we asked + // for as commit oids. 
We can then lookup and parse the commited pointed + // at by the returned OID; note that this operation is specially fast + // since the raw contents of the commit object will be cached in memory + + function walk() { + return revWalk.next().then(function(oid) { + if (!oid) { + return; + } + + return repo.getCommit(oid).then(function(commit) { + console.log("Commit:", commit.toString()); + return walk(); + }); + }); + } + + return walk(); + }) + + .then(function() { + // ### Index File Manipulation + + // The [index file API][gi] allows you to read, traverse, update and write + // the Git index file (sometimes thought of as the staging area). + return repo.refreshIndex(); + }) + + .then(function(index) { + // For each entry in the index, you can get a bunch of information + // including the SHA (oid), path and mode which map to the tree objects + // that are written out. It also has filesystem properties to help + // determine what to inspect for changes (ctime, mtime, dev, ino, uid, + // gid, file_size and flags) All these properties are exported publicly in + // the `IndexEntry` class + + index.entries().forEach(function(entry) { + console.log("Index Entry:", entry.path(), entry.mtime().seconds()); + }); + }) + + .then(function() { + // ### References + + // The [reference API][ref] allows you to list, resolve, create and update + // references such as branches, tags and remote references (everything in + // the .git/refs directory). 
+ + return repo.getReferenceNames(nodegit.Reference.TYPE.LISTALL); + }) + + .then(function(referenceNames) { + var promises = []; + + referenceNames.forEach(function(referenceName) { + promises.push(repo.getReference(referenceName).then(function(reference) { + if (reference.isConcrete()) { + console.log("Reference:", referenceName, reference.target()); + } else if (reference.isSymbolic()) { + console.log("Reference:", referenceName, reference.symbolicTarget()); + } + })); + }); + + return Promise.all(promises); + }) + + .done(function() { + console.log("Done!"); + }); diff --git a/examples/index-add-and-remove.js b/examples/index-add-and-remove.js new file mode 100644 index 000000000..d34199bde --- /dev/null +++ b/examples/index-add-and-remove.js @@ -0,0 +1,133 @@ +var nodegit = require("../"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(repo) { + return repo.refreshIndex() + .then(function(index) { + var fileContent = { + newFile1: "this has some content", + newFile2: "and this will have more content" + }; + var fileNames = Object.keys(fileContent); + + return Promise.all(fileNames.map(function(fileName) { + fse.writeFile( + path.join(repo.workdir(), fileName), fileContent[fileName]); + })) + + + + // This will add all files to the index + .then(function() { + return index.addAll(); + }) + .then(function() { + var newFiles = index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + + console.log( + "\n-------------------\n" + + "Added files: " + + "\n-------------------\n"); + newFiles.forEach(function(entry) { + console.log(entry.path); + }); + }) + .then(function() { + // This will remove the files from the index + return index.removeAll("newFile*"); + }) + .then(function() { + var newFiles = index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + 
+ console.log("New files in index: " + newFiles.length); + }) + + + + // We can also provide a pattern to add files to the index + .then(function() { + return index.addAll("newFile*"); + }) + .then(function() { + var newFiles = index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + + console.log( + "\n-------------------\n" + + "Added files with pattern: " + + "\n-------------------\n"); + newFiles.forEach(function(entry) { + console.log(entry.path); + }); + }) + .then(function() { + // We're also using the pattern in the remove + return index.removeAll("newFile*"); + }) + .then(function() { + var newFiles = index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + + console.log("New files in index: " + newFiles.length); + }) + + + + // Callbacks can be used for a finer degree of control over what + // we add to the index + .then(function() { + return index.addAll( + "newFile*", + nodegit.Index.ADD_OPTION.ADD_CHECK_PATHSPEC, + function(path, matchedPattern) { + if (path == "newFile1") { + return 0; // add the file + } + + return 1; // skip the file + }); + }) + .then(function() { + var newFiles = index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + + console.log( + "\n-------------------\n" + + "Added files with callback: " + + "\n-------------------\n"); + newFiles.forEach(function(entry) { + console.log(entry.path); + }); + }) + .then(function() { + // Lets use a callback in the remove as well + return index.removeAll(null, function(path) { + if (~path.indexOf("newFile")) { + return 0; // remove the file + } + + return 1; // don't remove the file + }); + }) + .then(function() { + var newFiles = index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + + console.log("Total: " + index.entries().length); + console.log("New files in index: " + newFiles.length); + }); + }); + }).done(function() { + console.log("All done!"); + }); diff 
--git a/examples/merge-cleanly.js b/examples/merge-cleanly.js new file mode 100644 index 000000000..c338cb90f --- /dev/null +++ b/examples/merge-cleanly.js @@ -0,0 +1,136 @@ +var nodegit = require("../"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +fse.ensureDir = promisify(fse.ensureDir); + +var ourFileName = "ourNewFile.txt"; +var ourFileContent = "I like Toll Roads. I have an EZ-Pass!"; +var ourBranchName = "ours"; + +var theirFileName = "theirNewFile.txt"; +var theirFileContent = "I'm skeptical about Toll Roads"; +var theirBranchName = "theirs"; + +var repoDir = "../../newRepo"; + +var repository; +var ourCommit; +var theirCommit; +var ourBranch; +var theirBranch; + +var ourSignature = nodegit.Signature.create("Ron Paul", + "RonPaul@TollRoadsRBest.info", 123456789, 60); +var theirSignature = nodegit.Signature.create("Greg Abbott", + "Gregggg@IllTollYourFace.us", 123456789, 60); + +// Create a new repository in a clean directory, and add our first file +fse.remove(path.resolve(__dirname, repoDir)) +.then(function() { + return fse.ensureDir(path.resolve(__dirname, repoDir)); +}) +.then(function() { + return nodegit.Repository.init(path.resolve(__dirname, repoDir), 0); +}) +.then(function(repo) { + repository = repo; + return fse.writeFile( + path.join(repository.workdir(), ourFileName), + ourFileContent + ); +}) + +// Load up the repository index and make our initial commit to HEAD +.then(function() { + return repository.refreshIndex(); +}) +.then(function(index) { + return index.addByPath(ourFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); +}) +.then(function(oid) { + return repository.createCommit("HEAD", ourSignature, + ourSignature, "we made a commit", oid, []); +}) + +// Get commit object from the oid, and create our new branches at that position +.then(function(commitOid) { + return 
repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); +}) + +// Create a new file, stage it and commit it to our second branch +.then(function(branch) { + theirBranch = branch; + return fse.writeFile( + path.join(repository.workdir(), theirFileName), + theirFileContent + ); +}) +.then(function() { + return repository.refreshIndex(); +}) +.then(function(index) { + return index.addByPath(theirFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); +}) +.then(function(oid) { + // You don"t have to change head to make a commit to a different branch. + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); +}) +.then(function(commitOid) { + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); +}) + + +// Merge the two commits +.then(function() { + return nodegit.Merge.commits(repository, ourCommit, theirCommit); +}) + + +// Merging returns an index that isn't backed by the repository. +// You have to manually check for merge conflicts. If there are none +// you just have to write the index. You do have to write it to +// the repository instead of just writing it. +.then(function(index) { + if (!index.hasConflicts()) { + return index.write() + .then(function() { + return index.writeTreeTo(repository); + }); + } +}) + + +// Create our merge commit back on our branch +.then(function(oid) { + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we merged their commit", oid, [ourCommit, theirCommit]); +}) +.done(function(commitId) { + // We never changed the HEAD after the initial commit; + // it should still be the same as master. 
+ console.log("New Commit: ", commitId); +}); diff --git a/examples/merge-with-conflicts.js b/examples/merge-with-conflicts.js new file mode 100644 index 000000000..13e07c8d5 --- /dev/null +++ b/examples/merge-with-conflicts.js @@ -0,0 +1,201 @@ +var nodegit = require("../"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +fse.ensureDir = promisify(fse.ensureDir); + +var repoDir = "../../newRepo"; +var fileName = "newFile.txt"; + +var baseFileContent = "All Bobs are created equal. ish.\n"; +var ourFileContent = "Big Bobs are best, IMHO.\n"; +var theirFileContent = "Nobody expects the small Bobquisition!\n"; +var finalFileContent = "Big Bobs are beautiful and the small are unexpected!\n"; + +var baseSignature = nodegit.Signature.create("Peaceful Bob", + "justchill@bob.net", 123456789, 60); +var ourSignature = nodegit.Signature.create("Big Bob", + "impressive@bob.net", 123456789, 60); +var theirSignature = nodegit.Signature.create("Small Bob", + "underestimated@bob.net", 123456789, 60); + +var ourBranchName = "ours"; +var theirBranchName = "theirs"; + +var repository; +var baseCommit; +var baseCommitOid; +var ourCommit; +var theirCommit; +var ourBranch; +var theirBranch; + +// Create a new repository in a clean directory, and add our first file +fse.remove(path.resolve(__dirname, repoDir)) +.then(function() { + return fse.ensureDir(path.resolve(__dirname, repoDir)); +}) +.then(function() { + return nodegit.Repository.init(path.resolve(__dirname, repoDir), 0); +}) +.then(function(repo) { + repository = repo; + return fse.writeFile( + path.join(repository.workdir(), fileName), + baseFileContent + ); +}) + + +// Load up the repository index and make our initial commit to HEAD +.then(function() { + return repository.refreshIndex(); +}) +.then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); +}) 
+.then(function(oid) { + return repository.createCommit("HEAD", baseSignature, + baseSignature, "bobs are all ok", oid, []); +}) +.then(function(commitOid) { + baseCommitOid = commitOid; + return repository.getCommit(commitOid) + .then(function(commit) { + baseCommit = commit; + }); +}) + + +// create our branches +.then(function() { + return repository.createBranch(ourBranchName, baseCommitOid) + .then(function(branch) { + ourBranch = branch; + }); +}) +.then(function() { + return repository.createBranch(theirBranchName, baseCommitOid) + .then(function(branch) { + theirBranch = branch; + }); +}) + + +// Write and commit our version of the file +.then(function() { + return fse.writeFile( + path.join(repository.workdir(), fileName), + ourFileContent + ); +}) +.then(function() { + return repository.refreshIndex() + .then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }); +}) +.then(function(oid) { + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "lol big bobs :yesway:", oid, [baseCommit]); +}) +.then(function(commitOid) { + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }); +}) + + +// Write and commit their version of the file +.then(function() { + return fse.writeFile( + path.join(repository.workdir(), fileName), + theirFileContent + ); +}) +.then(function() { + return repository.refreshIndex() + .then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }); +}) +.then(function(oid) { + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "lol big bobs :poop:", oid, [baseCommit]); +}) +.then(function(commitOid) { + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); +}) + + +// move the head to our branch, 
just to keep things tidy +.then(function() { + return nodegit.Reference.lookup(repository, "HEAD"); +}) +.then(function(head) { + return head.symbolicSetTarget(ourBranch.name(), ""); +}) + + +// Merge their branch into our branch +.then(function() { + return nodegit.Merge.commits(repository, ourCommit, theirCommit, null); +}) + +// Merging returns an index that isn't backed by the repository. +// You have to write it to the repository instead of just writing it. +.then(function(index) { + if (index.hasConflicts()) { + console.log("Conflict time!"); + + // if the merge had comflicts, solve them + // (in this case, we simply overwrite the file) + fse.writeFileSync( + path.join(repository.workdir(), fileName), + finalFileContent + ); + } +}) + +// we need to get a new index as the other one isnt backed to +// the repository in the usual fashion, and just behaves weirdly +.then(function() { + return repository.refreshIndex() + .then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }); +}) +.then(function(oid) { + // create the new merge commit on our branch + return repository.createCommit(ourBranch.name(), baseSignature, + baseSignature, "Stop this bob sized fued", oid, [ourCommit, theirCommit]); +}) +.done(function(commitId) { + console.log("New Commit: ", commitId); +}); diff --git a/examples/pull.js b/examples/pull.js new file mode 100644 index 000000000..7f5fc9af0 --- /dev/null +++ b/examples/pull.js @@ -0,0 +1,31 @@ +var nodegit = require("../"); +var path = require("path"); + +var repoDir = "../../test"; + +var repository; + +// Open a repository that needs to be fetched and fast-forwarded +nodegit.Repository.open(path.resolve(__dirname, repoDir)) + .then(function(repo) { + repository = repo; + + return repository.fetchAll({ + callbacks: { + credentials: function(url, userName) { + return nodegit.Cred.sshKeyFromAgent(userName); + }, + certificateCheck: 
function() { + return 1; + } + } + }); + }) + // Now that we're finished fetching, go ahead and merge our local branch + // with the new one + .then(function() { + return repository.mergeBranches("master", "origin/master"); + }) + .done(function() { + console.log("Done!"); + }); diff --git a/examples/push.js b/examples/push.js new file mode 100644 index 000000000..ac340820f --- /dev/null +++ b/examples/push.js @@ -0,0 +1,70 @@ +var nodegit = require("../"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +fse.ensureDir = promisify(fse.ensureDir); + +var fileName = "newFile.txt"; +var fileContent = "hello world"; + +var repoDir = "../../newRepo"; + +var repository; +var remote; + +var signature = nodegit.Signature.create("Foo bar", + "foo@bar.com", 123456789, 60); + +// Create a new repository in a clean directory, and add our first file +fse.remove(path.resolve(__dirname, repoDir)) +.then(function() { + return fse.ensureDir(path.resolve(__dirname, repoDir)); +}) +.then(function() { + return nodegit.Repository.init(path.resolve(__dirname, repoDir), 0); +}) +.then(function(repo) { + repository = repo; + return fse.writeFile(path.join(repository.workdir(), fileName), fileContent); +}) + +// Load up the repository index and make our initial commit to HEAD +.then(function() { + return repository.refreshIndex(); +}) +.then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); +}) +.then(function(oid) { + return repository.createCommit("HEAD", signature, signature, + "initial commit", oid, []); +}) + +// Add a new remote +.then(function() { + return nodegit.Remote.create(repository, "origin", + "git@github.com:nodegit/push-example.git") + .then(function(remoteResult) { + remote = remoteResult; + + // Create the push object for this remote + return remote.push( + ["refs/heads/master:refs/heads/master"], 
+ { + callbacks: { + credentials: function(url, userName) { + return nodegit.Cred.sshKeyFromAgent(userName); + } + } + } + ); + }); +}).done(function() { + console.log("Done!"); +}); diff --git a/examples/read-file.js b/examples/read-file.js new file mode 100644 index 000000000..991a5ae39 --- /dev/null +++ b/examples/read-file.js @@ -0,0 +1,25 @@ +var nodegit = require("../"), + path = require("path"); + +// This example opens a certain file, `README.md`, at a particular commit, +// and prints the first 10 lines as well as some metadata. +var _entry; +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(repo) { + return repo.getCommit("59b20b8d5c6ff8d09518454d4dd8b7b30f095ab5"); + }) + .then(function(commit) { + return commit.getEntry("README.md"); + }) + .then(function(entry) { + _entry = entry; + return _entry.getBlob(); + }) + .then(function(blob) { + console.log(_entry.name(), _entry.sha(), blob.rawsize() + "b"); + console.log("========================================================\n\n"); + var firstTenLines = blob.toString().split("\n").slice(0, 10).join("\n"); + console.log(firstTenLines); + console.log("..."); + }) + .done(); diff --git a/examples/remove-and-commit.js b/examples/remove-and-commit.js new file mode 100644 index 000000000..10751f061 --- /dev/null +++ b/examples/remove-and-commit.js @@ -0,0 +1,56 @@ +var nodegit = require("../"), + path = require("path"), + fileName = "newfile.txt"; + +/** + * This example deletes a certain file `newfile.txt`, + * removes it from the git index and commits it to head. Similar to a + * `git rm newfile.txt` followed by a `git commit`. Use add-and-commit.js + * to create the file first. 
+**/ + +var _repository; +var _index; +var _oid; + +//open a git repo +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(repo) { + _repository = repo; + return repo.refreshIndex(); + }) + .then(function(index){ + _index = index; + }) + .then(function() { + //remove the file from the index... + return _index.removeByPath(fileName); + }) + .then(function() { + return _index.write(); + }) + .then(function() { + return _index.writeTree(); + }) + .then(function(oid) { + _oid = oid; + return nodegit.Reference.nameToId(_repository, "HEAD"); + }) + .then(function(head) { + return _repository.getCommit(head); + }) + .then(function(parent) { + var author = nodegit.Signature.create("Scott Chacon", + "schacon@gmail.com", 123456789, 60); + var committer = nodegit.Signature.create("Scott A Chacon", + "scott@github.com", 987654321, 90); + + return _repository.createCommit("HEAD", author, committer, + "message", _oid, [parent]); + }) + .then(function(commitId) { + // the file is removed from the git repo, use fs.unlink now to remove it + // from the filesystem. 
+ console.log("New Commit:", commitId.allocfmt()); + }) + .done(); diff --git a/examples/status.js b/examples/status.js new file mode 100644 index 000000000..9a04bae10 --- /dev/null +++ b/examples/status.js @@ -0,0 +1,24 @@ +var nodegit = require("../"), + path = require("path"); + +// This code shows working directory changes similar to git status + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(repo) { + repo.getStatus().then(function(statuses) { + function statusToText(status) { + var words = []; + if (status.isNew()) { words.push("NEW"); } + if (status.isModified()) { words.push("MODIFIED"); } + if (status.isTypechange()) { words.push("TYPECHANGE"); } + if (status.isRenamed()) { words.push("RENAMED"); } + if (status.isIgnored()) { words.push("IGNORED"); } + + return words.join(" "); + } + + statuses.forEach(function(file) { + console.log(file.path() + " " + statusToText(file)); + }); + }); +}); diff --git a/examples/walk-history-for-file.js b/examples/walk-history-for-file.js new file mode 100644 index 000000000..f09415455 --- /dev/null +++ b/examples/walk-history-for-file.js @@ -0,0 +1,62 @@ +var nodegit = require("../"), + path = require("path"), + historyFile = "generate/input/descriptor.json", + walker, + historyCommits = [], + commit, + repo; + +// This code walks the history of the master branch and prints results +// that look very similar to calling `git log` from the command line + +function compileHistory(resultingArrayOfCommits) { + var lastSha; + if (historyCommits.length > 0) { + lastSha = historyCommits[historyCommits.length - 1].commit.sha(); + if ( + resultingArrayOfCommits.length == 1 && + resultingArrayOfCommits[0].commit.sha() == lastSha + ) { + return; + } + } + + resultingArrayOfCommits.forEach(function(entry) { + historyCommits.push(entry); + }); + + lastSha = historyCommits[historyCommits.length - 1].commit.sha(); + + walker = repo.createRevWalk(); + walker.push(lastSha); + 
walker.sorting(nodegit.Revwalk.SORT.TIME); + + return walker.fileHistoryWalk(historyFile, 500) + .then(compileHistory); +} + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(r) { + repo = r; + return repo.getMasterCommit(); + }) + .then(function(firstCommitOnMaster){ + // History returns an event. + walker = repo.createRevWalk(); + walker.push(firstCommitOnMaster.sha()); + walker.sorting(nodegit.Revwalk.SORT.Time); + + return walker.fileHistoryWalk(historyFile, 500); + }) + .then(compileHistory) + .then(function() { + historyCommits.forEach(function(entry) { + commit = entry.commit; + console.log("commit " + commit.sha()); + console.log("Author:", commit.author().name() + + " <" + commit.author().email() + ">"); + console.log("Date:", commit.date()); + console.log("\n " + commit.message()); + }); + }) + .done(); diff --git a/examples/walk-history.js b/examples/walk-history.js new file mode 100644 index 000000000..2e41ce2da --- /dev/null +++ b/examples/walk-history.js @@ -0,0 +1,27 @@ +var nodegit = require("../"), + path = require("path"); + +// This code walks the history of the master branch and prints results +// that look very similar to calling `git log` from the command line + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(repo) { + return repo.getMasterCommit(); + }) + .then(function(firstCommitOnMaster){ + // History returns an event. + var history = firstCommitOnMaster.history(nodegit.Revwalk.SORT.Time); + + // History emits "commit" event for each commit in the branch's history + history.on("commit", function(commit) { + console.log("commit " + commit.sha()); + console.log("Author:", commit.author().name() + + " <" + commit.author().email() + ">"); + console.log("Date:", commit.date()); + console.log("\n " + commit.message()); + }); + + // Don't forget to call `start()`! 
+ history.start(); + }) + .done(); diff --git a/examples/walk-tree.js b/examples/walk-tree.js new file mode 100644 index 000000000..6c564acb6 --- /dev/null +++ b/examples/walk-tree.js @@ -0,0 +1,25 @@ +var nodegit = require("../"), + path = require("path"); + +// A `tree` in git is typically a representation of the filesystem at +// a revision. A tree has a set of entries, each entry being either a +// tree (directory), or a file. + +nodegit.Repository.open(path.resolve(__dirname, "../.git")) + .then(function(repo) { + return repo.getMasterCommit(); + }) + .then(function(firstCommitOnMaster) { + return firstCommitOnMaster.getTree(); + }) + .then(function(tree) { + // `walk()` returns an event. + var walker = tree.walk(); + walker.on("entry", function(entry) { + console.log(entry.path()); + }); + + // Don't forget to call `start()`! + walker.start(); + }) + .done(); diff --git a/generate/index.js b/generate/index.js new file mode 100644 index 000000000..6bdaa42ef --- /dev/null +++ b/generate/index.js @@ -0,0 +1,38 @@ +var generateJson = require("./scripts/generateJson"); +var generateNativeCode = require("./scripts/generateNativeCode"); +var generateMissingTests = require("./scripts/generateMissingTests"); +var submoduleStatus = require("../lifecycleScripts/submodules/getStatus"); + +module.exports = function generate() { + console.log("[nodegit] Generating native code"); + + return submoduleStatus() + .then(function(statuses) { + var dirtySubmodules = statuses + .filter(function(status) { + return status.onNewCommit + || status.needsInitialization + || status.workDirDirty; + }); + + if (dirtySubmodules.length) { + console.warn("[nodegit] WARNING - Some submodules are out-of-sync"); + dirtySubmodules.forEach(function(submodule) { + console.warn("[nodegit]\t" + submodule.name); + }); + } + }) + .then(function() { + generateJson(); + generateNativeCode(); + generateMissingTests(); + }) + .catch(function(e) { + console.error("[nodegit] ERROR - Could not generate native 
code"); + console.error(e); + }); +} + +if (require.main === module) { + module.exports(); +} diff --git a/generate/input/callbacks.json b/generate/input/callbacks.json new file mode 100644 index 000000000..999ccd57f --- /dev/null +++ b/generate/input/callbacks.json @@ -0,0 +1,816 @@ +{ + "git_attr_foreach_cb": { + "args": [ + { + "name": "name", + "cType": "const char *" + }, + { + "name": "value", + "cType": "const char *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_blob_chunk_cb": { + "args": [ + { + "name": "entry", + "cType": "const git_config_entry *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_checkout_notify_cb": { + "args": [ + { + "name": "why", + "cType": "git_checkout_notify_t" + }, + { + "name": "path", + "cType": "const char *" + }, + { + "name": "baseline", + "cType": "const git_diff_file *" + }, + { + "name": "target", + "cType": "const git_diff_file *" + }, + { + "name": "workdir", + "cType": "const git_diff_file *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_checkout_progress_cb": { + "args": [ + { + "name": "path", + "cType": "const char *" + }, + { + "name": "completed_steps", + "cType": "size_t" + }, + { + "name": "total_steps", + "cType": "size_t" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1, + "throttle": 100 + } + }, + "git_checkout_perfdata_cb": { + "args": [ + { + "name": "perfdata", + "cType": "const git_checkout_perfdata *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_config_foreach_cb": { + "args": [ + { + "name": 
"entry", + "cType": "const git_config_entry *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_cred_acquire_cb": { + "args": [ + { + "name": "cred", + "cType": "git_cred **", + "isReturn": true + }, + { + "name": "url", + "cType": "const char *" + }, + { + "name": "username_from_url", + "cType": "const char *" + }, + { + "name": "allowed_types", + "cType": "unsigned int" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_diff_binary_cb": { + "args": [ + { + "name": "delta", + "cType": "const git_diff_delta *" + }, + { + "name": "binary", + "cType": "const git_diff_binary *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 0, + "success": 0, + "error": -1 + } + }, + "git_diff_file_cb": { + "args": [ + { + "name": "delta", + "cType": "const git_diff_delta *" + }, + { + "name": "progress", + "cType": "float" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1, + "throttle": 100 + } + }, + "git_diff_hunk_cb": { + "args": [ + { + "name": "delta", + "cType": "const git_diff_delta *" + }, + { + "name": "hunk", + "cType": "const git_diff_hunk *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_diff_line_cb": { + "args": [ + { + "name": "delta", + "cType": "const git_diff_delta *" + }, + { + "name": "hunk", + "cType": "const git_diff_hunk *" + }, + { + "name": "line", + "cType": "const git_diff_line *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_diff_notify_cb": { + "args": [ + { + "name": "diff_so_far", + "cType": 
"const git_diff *" + }, + { + "name": "delta_to_add", + "cType": "git_diff_delta *" + }, + { + "name": "matched_pathspec", + "cType": "const char *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + },"git_diff_progress_cb": { + "args": [ + { + "name": "diff_so_far", + "cType": "const git_diff *" + }, + { + "name": "old_path", + "cType": "const char *" + }, + { + "name": "new_path", + "cType": "const char *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_index_matched_path_cb": { + "args": [ + { + "name": "path", + "cType": "const char *" + }, + { + "name": "matched_pathspec", + "cType": "const char *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_note_foreach_cb": { + "args": [ + { + "name": "blob_id", + "cType": "const git_oid *" + }, + { + "name": "annotated_object_id", + "cType": "const git_oid *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 0, + "success": 0, + "error": -1 + } + }, + "git_odb_foreach_cb": { + "args": [ + { + "name": "id", + "cType": "const git_oid *" + }, + { + "name": "payload", + "cType": "void *" + } + ] + }, + "git_packbuilder_foreach_cb": { + "args": [ + { + "name": "buf", + "cType": "void *" + }, + { + "name": "size", + "cType": "size_t" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_remote_create_cb": { + "args": [ + { + "name": "out", + "cType": "git_repository **", + "isReturn": true + }, + { + "name": "repo", + "cType": "git_repository *" + }, + { + "name": "name", + "cType": "const char *" + }, + { + "name": "url", + "cType": "const char *" + }, + { + "name": 
"payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 0, + "success": 0, + "error": 1 + } + }, + "git_repository_create_cb": { + "args": [ + { + "name": "out", + "cType": "git_repository **", + "isReturn": true + }, + { + "name": "path", + "cType": "const char *" + }, + { + "name": "bare", + "cType": "int" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 0, + "success": 0, + "error": 1 + } + }, + "git_reference_foreach_cb": { + "args": [ + { + "name": "refname", + "cType": "const char *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_reference_foreach_name_cb": { + "args": [ + { + "name": "name", + "cType": "const char *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_repository_fetchhead_foreach_cb": { + "args": [ + { + "name": "refname", + "cType": "const char *" + }, + { + "name": "remote_url", + "cType": "const char *" + }, + { + "name": "oid", + "cType": "const git_oid *" + }, + { + "name": "is_merge", + "cType": "unsigned int" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 0, + "success": 0, + "error": 1 + } + }, + "git_repository_mergehead_foreach_cb": { + "args": [ + { + "name": "oid", + "cType": "const git_oid *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 0, + "success": 0, + "error": 1 + } + }, + "git_revwalk_hide_cb": { + "args": [ + { + "name": "commit_id", + "cType": "const git_oid *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_smart_subtransport_cb": { + "args": [ + { + "name": "out", + "cType": "git_smart_subtransport **", + 
"isReturn": true + }, + { + "name": "owner", + "cType": "git_transport*" + }, + { + "name": "param", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 0, + "success": 0, + "error": -1 + } + }, + "git_stash_apply_progress_cb": { + "args": [ + { + "name": "progress", + "cType": "git_stash_apply_progress_t" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults":0, + "success": 0, + "error": -1, + "throttle": 100 + } + }, + "git_stash_cb": { + "args": [ + { + "name": "index", + "cType": "size_t" + }, + { + "name": "message", + "cType": "const char *" + }, + { + "name": "stash_id", + "cType": "const git_oid *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults":0, + "success": 0, + "error": -1 + } + }, + "git_status_cb": { + "args": [ + { + "name": "path", + "cType": "const char *" + }, + { + "name": "status_flags", + "cType": "unsigned int" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 0, + "success": 0, + "error": -1 + } + }, + "git_submodule_cb": { + "args": [ + { + "name": "sm", + "cType": "git_submodule *" + }, + { + "name": "name", + "cType": "const char *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 0, + "success": 0, + "error": -1 + } + }, + "git_tag_foreach_cb": { + "args": [ + { + "name": "name", + "cType": "const char *" + }, + { + "name": "oid", + "cType": "const git_oid *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_transfer_progress_cb": { + "args": [ + { + "name": "stats", + "cType": "const git_transfer_progress *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 0, + "success": 0, + "error": -1, + "throttle": 100 + } + }, + "git_transport_cb": { + 
"args": [ + { + "name": "out", + "cType": "git_transport **" + }, + { + "name": "owner", + "cType": "git_remote *" + }, + { + "name": "param", + "cType": "void *", + "payload": true + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_transport_certificate_check_cb": { + "args": [ + { + "name": "cert", + "cType": "git_cert *" + }, + { + "name": "valid", + "cType": "int" + }, + { + "name": "host", + "cType": "const char *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_transport_message_cb": { + "args": [ + { + "name": "str", + "cType": "const char *" + }, + { + "name": "len", + "cType": "int" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_treebuilder_filter_cb": { + "args": [ + { + "name": "entry", + "cType": "const git_tree_entry *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + }, + "git_treewalk_cb": { + "args": [ + { + "name": "root", + "cType": "const char *" + }, + { + "name": "entry", + "cType": "const git_tree_entry *" + }, + { + "name": "payload", + "cType": "void *" + } + ], + "return": { + "type": "int", + "noResults": 1, + "success": 0, + "error": -1 + } + } +} diff --git a/generate/input/descriptor.json b/generate/input/descriptor.json new file mode 100644 index 000000000..044757eda --- /dev/null +++ b/generate/input/descriptor.json @@ -0,0 +1,2562 @@ +{ + "enums": { + "attr": { + "JsName": "STATES", + "isMask": false + }, + "branch": { + "JsName": "BRANCH", + "isMask": false + }, + "cert": { + "JsName": "TYPE", + "isMask": false + }, + "clone_local": { + "isMask": false, + "values": { + "GIT_CLONE_LOCAL": { + "JsName": "LOCAL" + } + } + }, + "describe_strategy": { + "ignore": true + }, + "delta": 
{ + "owner": "Diff" + }, + "error": { + "JsName": "ERROR", + "isMask": false + }, + "error_code": { + "values": { + "GIT_ERROR": { + "JsName": "ERROR" + } + } + }, + "filemode": { + "owner": "TreeEntry", + "values": { + "GIT_FILEMODE_BLOB_EXECUTABLE": { + "JsName": "EXECUTABLE" + } + } + }, + "repository_init_flag": { + "removeString": "INIT_" + }, + "otype": { + "JsName": "TYPE", + "owner": "Object", + "removeString": "OBJ_" + }, + "proxy": { + "JsName": "PROXY", + "isMask": false + }, + "ref": { + "owner": "Reference", + "JsName": "TYPE" + }, + "reset": { + "JsName": "TYPE", + "isMask": false + }, + "sort": { + "owner": "Revwalk" + }, + "status": { + "JsName": "STATUS", + "isMask": false + } + }, + "types": + { + "annotated_commit": { + "functions": { + "git_annotated_commit_id": { + "return": { + "ownedByThis": true + } + } + } + }, + "attr": { + "functions": { + "git_attr_foreach": { + "ignore": true + }, + "git_attr_get": { + "isAsync": true, + "args": { + "value_out": { + "isReturn": true + } + } + }, + "git_attr_get_many": { + "args": { + "values_out": { + "isReturn": true, + "jsClassName": "Array" + } + } + } + } + }, + "blame": { + "cType": "git_blame", + "functions": { + "git_blame_file": { + "args": { + "options": { + "isOptional": true + } + } + } + } + }, + "blame_hunk": { + "fields": { + "boundary": { + "ignore": true + } + } + }, + "blob": { + "selfFreeing": true, + "functions": { + "git_blob_create_frombuffer": { + "isAsync": false, + "args": { + "id": { + "isReturn": true + }, + "buffer": { + "cppClassName": "Buffer", + "jsClassName": "Buffer" + } + } + }, + "git_blob_create_fromchunks": { + "ignore": true + }, + "git_blob_filtered_content": { + "ignore": true + }, + "git_blob_id": { + "return": { + "ownedByThis": true + } + }, + "git_blob_rawcontent": { + "return": { + "cppClassName": "Wrapper", + "jsClassName": "Buffer" + } + } + }, + "dependencies": [ + "../include/wrapper.h", + "node_buffer.h" + ] + }, + "branch": { + "functions": { + 
"git_branch_iterator_free": { + "ignore": true + }, + "git_branch_create": { + "args": { + "force": { + "isOptional": true + }, + "signature": { + "isOptional": true + }, + "log_message": { + "isOptional": true + } + } + }, + "git_branch_create_from_annotated": { + "args": { + "ref_out": { + "isReturn": true + }, + "repository": {}, + "branch_name": {}, + "commit": {}, + "force": { + "isOptional": true + } + } + }, + "git_branch_next": { + "ignore": true + }, + "git_branch_set_upstream": { + "isAsync": true, + "args": { + "upstream_name": { + "isOptional": true + } + }, + "return" : { + "isReturn": true, + "isErrorCode": true + } + }, + "git_branch_upstream": { + "isAsync": true, + "return" : { + "isReturn": true, + "isErrorCode": true + } + } + } + }, + "buf": { + "functions": { + "git_buf_grow": { + "cppFunctionName": "Grow", + "jsFunctionName": "grow", + "args": { + "buffer": { + "isReturn": true, + "isSelf": false, + "shouldAlloc": true + } + }, + "return": { + "cppClassName": "Number", + "jsClassName": "Number", + "isErrorCode": true + }, + "isAsync": true + }, + "git_buf_set": { + "cppFunctionName": "Set", + "jsFunctionName": "set", + "args": { + "buffer": { + "isReturn": true, + "isSelf": false, + "shouldAlloc": true + }, + "data": { + "cppClassName": "Buffer", + "jsClassName": "Buffer" + } + }, + "return": { + "cppClassName": "Number", + "jsClassName": "Number", + "isErrorCode": true + }, + "isAsync": true + } + }, + "dependencies": [ + "../include/git_buf_converter.h" + ] + }, + "cert_hostkey": { + "fields": { + "hash_md5": { + "cppClassName": "String", + "size": 16 + }, + "hash_sha1": { + "cppClassName": "String", + "size": 20 + } + } + }, + "cert_x509": { + "fields": { + "data": { + "cppClassName": "Wrapper", + "jsClassName": "Buffer" + } + }, + "dependencies": [ + "../include/wrapper.h", + "node_buffer.h" + ] + }, + "checkout": { + "functions": { + "git_checkout_head": { + "args": { + "opts": { + "isOptional": true + } + }, + "isAsync": true, + 
"return": { + "isErrorCode": true + } + }, + "git_checkout_index": { + "args": { + "opts": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_checkout_tree": { + "args": { + "treeish": { + "isOptional": true + }, + "opts": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + } + } + }, + "cherrypick": { + "functions": { + "git_cherrypick": { + "isAsync": true, + "return": { + "isErrorCode": true + } + } + } + }, + "clone": { + "functions": { + "git_clone": { + "args": { + "options": { + "isOptional": true + } + } + } + } + }, + "clone_options": { + "fields": { + "repository_cb": { + "ignore": true + }, + "repository_cb_payload": { + "ignore": true + }, + "remote_cb": { + "ignore": true + }, + "remote_cb_payload": { + "ignore": true + } + } + }, + "commit": { + "selfFreeing": true, + "functions": { + "git_commit_amend": { + "args": { + "author": { + "isOptional": true + }, + "committer": { + "isOptional": true + }, + "id": { + "isReturn": true + }, + "message_encoding": { + "isOptional": true + }, + "message": { + "isOptional": true + }, + "tree": { + "isOptional": true + }, + "update_ref": { + "isOptional": true + } + } + }, + "git_commit_author": { + "return": { + "ownedByThis": true + } + }, + "git_commit_committer": { + "return": { + "ownedByThis": true + } + }, + "git_commit_create": { + "args": { + "id": { + "isReturn": true + }, + "message_encoding": { + "isOptional": true + }, + "parents": { + "cType": "const git_commit **", + "cppClassName": "Array", + "jsClassName": "Array", + "arrayElementCppClassName": "GitCommit" + }, + "update_ref": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_commit_create_buffer": { + "ignore": true + }, + "git_commit_create_from_callback": { + "ignore": true + }, + "git_commit_create_from_ids": { + "ignore": true + }, + "git_commit_create_from_v": { + "ignore": true + }, + 
"git_commit_extract_signature": { + "ignore": true + }, + "git_commit_id": { + "return": { + "ownedByThis": true + } + }, + "git_commit_parent_id": { + "return": { + "ownedByThis": true + } + }, + "git_commit_tree_id": { + "return": { + "ownedByThis": true + } + } + } + }, + "config": { + "functions": { + "git_config_add_backend": { + "ignore": true + }, + "git_config_add_file_ondisk": { + "ignore": true + }, + "git_config_backend_foreach_match": { + "ignore": true + }, + "git_config_delete_entry": { + "ignore": true + }, + "git_config_delete_multivar": { + "ignore": true + }, + "git_config_entry_free": { + "ignore": true + }, + "git_config_find_global": { + "ignore": true + }, + "git_config_find_system": { + "ignore": true + }, + "git_config_find_xdg": { + "ignore": true + }, + "git_config_foreach": { + "ignore": true + }, + "git_config_foreach_match": { + "ignore": true + }, + "git_config_free": { + "ignore": true + }, + "git_config_get_bool": { + "ignore": true + }, + "git_config_get_entry": { + "ignore": true + }, + "git_config_get_int32": { + "ignore": true + }, + "git_config_get_int64": { + "ignore": true + }, + "git_config_get_mapped": { + "ignore": true + }, + "git_config_get_multivar_foreach": { + "ignore": true + }, + "git_config_get_string": { + "ignore": true + }, + "git_config_get_string_buf": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_config_get_path": { + "ignore": true + }, + "git_config_iterator_free": { + "ignore": true + }, + "git_config_iterator_glob_new": { + "ignore": true + }, + "git_config_iterator_new": { + "ignore": true + }, + "git_config_init_backend": { + "ignore": true + }, + "git_config_lookup_map_value": { + "ignore": true + }, + "git_config_multivar_iterator_new": { + "ignore": true + }, + "git_config_new": { + "ignore": true + }, + "git_config_next": { + "ignore": true + }, + "git_config_open_default": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_config_open_global": { + 
"ignore": true + }, + "git_config_open_level": { + "ignore": true + }, + "git_config_open_ondisk": { + "ignore": true + }, + "git_config_parse_bool": { + "ignore": true + }, + "git_config_parse_int32": { + "ignore": true + }, + "git_config_parse_int64": { + "ignore": true + }, + "git_config_parse_path": { + "ignore": true + }, + "git_config_refresh": { + "ignore": true + }, + "git_config_set_bool": { + "ignore": true + }, + "git_config_set_int32": { + "ignore": true + }, + "git_config_set_string": { + "isAsync": true, + "return": { + "isErrorCode": true + } + } + }, + "dependencies": [ + "../include/git_buf_converter.h" + ] + }, + "config_backend": { + "ignore": true + }, + "config_iterator": { + "ignore": true + }, + "cred": { + "cType": "git_cred", + "functions": { + "git_cred_default_new": { + "isAsync": false + }, + "git_cred_ssh_custom_new": { + "ignore": true + }, + "git_cred_ssh_interactive_new": { + "ignore": true + }, + "git_cred_ssh_key_from_agent": { + "isAsync": false + }, + "git_cred_ssh_key_new": { + "isAsync": false + }, + "git_cred_userpass": { + "ignore": true + }, + "git_cred_userpass_plaintext_new": { + "isAsync": false + } + } + }, + "cred_ssh_custom": { + "ignore": true + }, + "cred_ssh_interactive": { + "ignore": true + }, + "cred_ssh_key": { + "ignore": true + }, + "cred_username": { + "fields": { + "username": { + "cppClassName": "String", + "cType": "char *" + } + } + }, + "cred_userpass_payload": { + "cDependencies": [ + "git2/cred_helpers.h" + ] + }, + "cred_userpass_plaintext": { + "ignore": true + }, + "describe": { + "ignore": true + }, + "diff": { + "cDependencies": [ + "git2/sys/diff.h" + ], + "functions": { + "git_diff_blob_to_buffer": { + "args": { + "old_blob" : { + "isOptional": true + }, + "old_as_path" : { + "isOptional": true + }, + "buffer" : { + "isOptional": true + }, + "buffer_len" : { + "isOptional": true + }, + "buffer_as_path" : { + "isOptional": true + }, + "options" : { + "isOptional": true + }, + "file_cb" : { + 
"isOptional": true + }, + "binary_cb": { + "isOptional": true + }, + "hunk_cb" : { + "isOptional": true + }, + "line_cb" : { + "isOptional": true + } + }, + "return": { + "isErrorCode": true + }, + "isAsync": true + }, + "git_diff_blobs": { + "ignore": true + }, + "git_diff_buffers": { + "ignore": true + }, + "git_diff_commit_as_email": { + "ignore": true + }, + "git_diff_find_init_options": { + "ignore": true + }, + "git_diff_find_similar": { + "args": { + "diff": { + "isSelf": true + }, + "options": { + "isOptional": true + } + }, + "return": { + "cppClassName": "Number", + "jsClassName": "Number", + "isErrorCode": true + }, + "isAsync": true + }, + "git_diff_foreach": { + "ignore": true + }, + "git_diff_format_email": { + "ignore": true + }, + "git_diff_format_email_init_options": { + "ignore": true + }, + "git_diff_free": { + "ignore": true + }, + "git_diff_get_stats": { + "ignore": true + }, + "git_diff_index_to_workdir": { + "args": { + "index": { + "isOptional": true + }, + "opts": { + "isOptional": true + } + } + }, + "git_diff_init_options": { + "ignore": true + }, + "git_diff_is_sorted_icase": { + "ignore": true + }, + "git_diff_merge": { + "isAsync": true, + "args": { + "onto": { + "isSelf": true + } + }, + "return": { + "isErrorCode": true + } + }, + "git_diff_num_deltas_of_type": { + "ignore": true + }, + "git_diff_print": { + "ignore": true + }, + "git_diff_print_callback__to_buf": { + "ignore": true + }, + "git_diff_print_callback__to_file_handle": { + "ignore": true + }, + "git_diff_stats_deletions": { + "ignore": true + }, + "git_diff_stats_files_changed": { + "ignore": true + }, + "git_diff_stats_free": { + "ignore": true + }, + "git_diff_stats_insertions": { + "ignore": true + }, + "git_diff_stats_to_buf": { + "ignore": true + }, + "git_diff_status_char": { + "ignore": true + }, + "git_diff_tree_to_index": { + "args": { + "old_tree": { + "isOptional": true + }, + "index": { + "isOptional": true + }, + "opts": { + "isOptional": true + } + } + }, + 
"git_diff_tree_to_tree": { + "args": { + "old_tree": { + "isOptional": true + }, + "new_tree": { + "isOptional": true + }, + "opts": { + "isOptional": true + } + } + }, + "git_diff_tree_to_workdir": { + "args": { + "old_tree": { + "isOptional": true + }, + "opts": { + "isOptional": true + } + } + }, + "git_diff_tree_to_workdir_with_index": { + "args": { + "old_tree": { + "isOptional": true + }, + "opts": { + "isOptional": true + } + } + } + } + }, + "diff_find_options": { + "hasConstructor": true, + "fields": { + "git_diff_similarity_metric": { + "ignore": true + } + } + }, + "diff_format_email_options": { + "ignore": true + }, + "diff_perfdata": { + "cDependencies": [ + "git2/sys/diff.h" + ] + }, + "diff_similarity_metric": { + "ignore": true + }, + "error_code": { + "values": { + "GIT_ERROR": { + "JsName": "ERROR" + } + } + }, + "filter": { + "functions": { + "git_filter_list_apply_to_blob": { + "ignore": true + }, + "git_filter_list_apply_to_data": { + "ignore": true + }, + "git_filter_list_apply_to_file": { + "ignore": true + }, + "git_filter_list_free": { + "ignore": true + }, + "git_filter_list_load": { + "ignore": true + }, + "git_filter_list_push": { + "ignore": true + }, + "git_filter_source_filemode": { + "ignore": true + }, + "git_filter_source_flags": { + "ignore": true + }, + "git_filter_source_id": { + "ignore": true + }, + "git_filter_source_mode": { + "ignore": true + }, + "git_filter_source_options": { + "ignore": true + }, + "git_filter_source_path": { + "ignore": true + }, + "git_filter_source_repo": { + "ignore": true + } + }, + "fields": { + "initialize": { + "ignore": true + }, + "shutdown": { + "ignore": true + }, + "check": { + "ignore": true + }, + "apply": { + "ignore": true + }, + "cleanup": { + "ignore": true + } + }, + "cDependencies": [ + "git2/sys/filter.h" + ] + }, + "filter_source": { + "ignore": true + }, + "graph": { + "functions": { + "git_graph_ahead_behind": { + "args": { + "ahead": { + "shouldAlloc": true, + "isReturn": true + 
}, + "behind": { + "shouldAlloc": true, + "isReturn": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_graph_descendant_of": { + "isAsync": true, + "return": { + "isResultOrError": true + } + } + } + }, + "hashsig": { + "cDependencies": [ + "git2/sys/hashsig.h" + ] + }, + "ignore": { + "functions": { + "git_ignore_path_is_ignored": { + "args": { + "ignored": { + "shouldAlloc": true, + "isReturn": true + } + }, + "isAsync": true + } + } + }, + "index": { + "functions": { + "git_index_add": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_add_all": { + "args": { + "pathspec": { + "isOptional": true + }, + "flags": { + "isOptional": true + }, + "callback": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_add_bypath": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_add_frombuffer": { + "ignore": true + }, + "git_index_checksum": { + "return": { + "ownedByThis": true + } + }, + "git_index_clear": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_conflict_add": { + "isAsync": true, + "args": { + "index": { + "isSelf": true + }, + "ancestor_entry": { + "isOptional": true + }, + "our_entry": { + "isOptional": true + }, + "their_entry": { + "isOptional": true + } + }, + "return": { + "isErrorCode": true + } + }, + "git_index_conflict_cleanup": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_conflict_get": { + "args": { + "ancestor_out": { + "isReturn": true + }, + "our_out": { + "isReturn": true + }, + "their_out": { + "isReturn": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_conflict_iterator_free": { + "ignore": true + }, + "git_index_conflict_iterator_new": { + "ignore": true + }, + "git_index_conflict_next": { + "ignore": true + }, + "git_index_conflict_remove": { + "isAsync": true, + "return": { + "isErrorCode": 
true + } + }, + "git_index_entrycount": { + "jsFunctionName": "entryCount" + }, + "git_index_find": { + "ignore": true + }, + "git_index_free": { + "ignore": true + }, + "git_index_get_bypath": { + "args": { + "stage": { + "isOptional": true + } + } + }, + "git_index_new": { + "ignore": true + }, + "git_index_open": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_read": { + "args": { + "force": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_read_tree": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_remove": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_remove_all": { + "args": { + "pathspec": { + "isOptional": true + }, + "flags": { + "isOptional": true + }, + "callback": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_remove_bypath": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_remove_directory": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_update_all": { + "args": { + "pathspec": { + "isOptional": true + }, + "flags": { + "isOptional": true + }, + "callback": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_write": { + "args": { + "force": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_write_tree": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_index_write_tree_to": { + "isAsync": true, + "return": { + "isErrorCode": true + } + } + }, + "dependencies": [ + "../include/str_array_converter.h" + ] + }, + "index_entry": { + "hasConstructor": true, + "ignoreInit": true + }, + "indexer": { + "cType": "git_indexer", + "functions": { + "git_indexer_append": { + "ignore": true + }, + "git_indexer_hash": { + "return": { + "ownedByThis": true + } 
+ }, + "git_indexer_new": { + "ignore": true + } + } + }, + "mempack": { + "functions": { + "git_mempack_new": { + "ignore": true + }, + "git_mempack_reset": { + "ignore": true + } + } + }, + "merge": { + "functions": { + "git_merge": { + "args": { + "their_heads": { + "cType": "const git_annotated_commit **", + "cppClassName": "Array", + "jsClassName": "Array", + "arrayElementCppClassName": "GitAnnotatedCommit" + } + } + }, + "git_merge_analysis": { + "ignore": true + }, + "git_merge_base_many": { + "ignore": true + }, + "git_merge_bases_many": { + "ignore": true + }, + "git_merge_base_octopus": { + "ignore": true + }, + "git_merge_commits": { + "args": { + "opts": { + "isOptional": true + } + } + }, + "git_merge_file": { + "ignore": true + }, + "git_merge_file_from_index": { + "ignore": true + }, + "git_merge_file_init_options": { + "ignore": true + }, + "git_merge_file_result_free": { + "ignore": true + } + } + }, + "merge_driver": { + "ignore": true + }, + "message": { + "functions": { + "git_message_prettify": { + "ignore": true + } + } + }, + "note": { + "functions": { + "git_note_iterator_free": { + "ignore": true + }, + "git_note_create": { + "args": { + "out": { + "shouldAlloc": true + } + } + }, + "git_note_id": { + "return": { + "ownedByThis": true + } + }, + "git_note_remove": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_note_foreach": { + "isAsync": true, + "return": { + "isErrorCode": true + } + } + } + }, + "object": { + "functions": { + "git_object_id": { + "return": { + "ownedByThis": true + } + }, + "git_object_short_id": { + "args": { + "out": { + "shouldAlloc": true + } + } + } + } + }, + "odb": { + "functions": { + "git_odb_add_alternate": { + "ignore": true + }, + "git_odb_add_backend": { + "ignore": true + }, + "git_odb_backend_loose": { + "ignore": true + }, + "git_odb_backend_one_pack": { + "ignore": true + }, + "git_odb_backend_pack": { + "ignore": true + }, + "git_odb_exists": { + "ignore": true + }, + 
"git_odb_exists_prefix": { + "ignore": true + }, + "git_odb_foreach": { + "ignore": true + }, + "git_odb_get_backend": { + "ignore": true + }, + "git_odb_hash": { + "ignore": true + }, + "git_odb_hashfile": { + "ignore": true + }, + "git_odb_init_backend": { + "ignore": true + }, + "git_odb_new": { + "ignore": true + }, + "git_odb_num_backends": { + "ignore": true + }, + "git_odb_open_rstream": { + "ignore": true + }, + "git_odb_open_wstream": { + "ignore": true + }, + "git_odb_read_header": { + "ignore": true + }, + "git_odb_read_prefix": { + "ignore": true + }, + "git_odb_refresh": { + "ignore": true + }, + "git_odb_stream_finalize_write": { + "ignore": true + }, + "git_odb_stream_free": { + "ignore": true + }, + "git_odb_stream_read": { + "ignore": true + }, + "git_odb_stream_write": { + "ignore": true + }, + "git_odb_write": { + "args": { + "data": { + "cppClassName": "Wrapper", + "jsClassName": "Buffer" + } + } + }, + "git_odb_write_pack": { + "ignore": true + } + } + }, + "odb_backend": { + "fields": { + "foreach": { + "ignore": true + }, + "writepack": { + "ignore": true + } + }, + "ignore": true + }, + "odb_object": { + "functions": { + "git_odb_object_data": { + "return": { + "cppClassName": "Wrapper", + "jsClassName": "Buffer" + } + }, + "git_odb_object_id": { + "return": { + "ownedByThis": true + } + } + }, + "dependencies": [ + "../include/wrapper.h", + "node_buffer.h" + ] + }, + "odb_stream": { + "ignore": true + }, + "odb_writepack": { + "ignore": true + }, + "oid": { + "selfFreeing": true, + "cpyFunction": "git_oid_cpy", + "freeFunctionName": "free", + "shouldAlloc": true, + "functions": { + "git_oid_cpy": { + "args": { + "out": { + "isReturn": true + } + } + }, + "git_oid_fmt": { + "ignore": true + }, + "git_oid_fromraw": { + "ignore": true + }, + "git_oid_fromstr": { + "isAsync": false + }, + "git_oid_fromstrn": { + "ignore": true + }, + "git_oid_fromstrp": { + "ignore": true + }, + "git_oid_nfmt": { + "ignore": true + }, + "git_oid_pathfmt": { + 
"ignore": true + }, + "git_oid_shorten_add": { + "ignore": true + }, + "git_oid_shorten_free": { + "ignore": true + }, + "git_oid_shorten_new": { + "ignore": true + }, + "git_oid_tostr": { + "ignore": true + } + }, + "fields": { + "id": { + "ignore": true + } + } + }, + "openssl": { + "cDependencies": [ + "git2/sys/openssl.h" + ] + }, + "packbuilder": { + "functions": { + "git_packbuilder_foreach": { + "ignore": true + }, + "git_packbuilder_hash": { + "return": { + "ownedByThis": true + } + }, + "git_packbuilder_new": { + "isAsync": false + }, + "git_packbuilder_set_callbacks": { + "ignore": true + }, + "git_packbuilder_write": { + "ignore": true + } + } + }, + "patch": { + "dependencies": [ + "../include/convenient_patch.h" + ], + "functions": { + "git_patch_free": { + "ignore": true + }, + "git_patch_from_buffers": { + "ignore": true + }, + "git_patch_from_diff": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_patch_get_hunk": { + "args": { + "out": { + "returnName": "hunk" + }, + "lines_in_hunk": { + "shouldAlloc": true, + "returnName": "linesInHunk", + "isReturn": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_patch_get_line_in_hunk": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_patch_line_stats": { + "args": { + "total_context": { + "isReturn": true + }, + "total_additions": { + "isReturn": true + }, + "total_deletions": { + "isReturn": true + } + }, + "return": { + "isErrorCode": true + } + }, + "git_patch_print": { + "ignore": true + }, + "git_patch_to_buf": { + "ignore": true + } + } + }, + "pathspec": { + "functions": { + "git_pathspec_match_list_free": { + "ignore": true + }, + "git_pathspec_new": { + "isAsync": false + } + } + }, + "push": { + "cType": "git_push", + "functions": { + "git_push_finish": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_push_set_callbacks": { + "ignore": true + }, + "git_push_status_foreach": { + "ignore": true 
+ } + } + }, + "rebase": { + "functions": { + "git_rebase_commit": { + "args": { + "id": { + "isReturn": true, + "shouldAlloc": true + }, + "author": { + "isOptional": true + }, + "message_encoding": { + "isOptional": true + }, + "message": { + "isOptional": true + } + } + }, + "git_rebase_finish": { + "args": { + "signature": { + "isOptional": true + } + } + }, + "git_rebase_free": { + "ignore": true + }, + "git_rebase_init": { + "args": { + "upstream": { + "isOptional": true + }, + "onto": { + "isOptional": true + }, + "signature": { + "isOptional": true + }, + "opts": { + "isOptional": true + } + } + } + } + }, + "refdb": { + "functions": { + "git_refdb_backend_fs": { + "ignore": true + }, + "git_refdb_init_backend": { + "ignore": true + }, + "git_refdb_set_backend": { + "ignore": true + }, + "git_refdb_new": { + "ignore": true + } + } + }, + "refdb_backend": { + "ignore": true + }, + "reference": { + "cppClassName": "GitRefs", + "selfFreeing": true, + "functions": { + "git_reference__alloc": { + "ignore": true + }, + "git_reference__alloc_symbolic": { + "ignore": true + }, + "git_reference_foreach": { + "ignore": true + }, + "git_reference_foreach_glob": { + "ignore": true + }, + "git_reference_foreach_name": { + "ignore": true + }, + "git_reference_free": { + "ignore": true + }, + "git_reference_iterator_free": { + "ignore": true + }, + "git_reference_iterator_glob_new": { + "ignore": true + }, + "git_reference_iterator_new": { + "ignore": true + }, + "git_reference_list": { + "args": { + "array": { + "isReturn": true, + "shouldAlloc": true, + "cppClassName": "Array", + "jsClassName": "Array", + "size": "count", + "key": "strings" + } + }, + "isAsync": true + }, + "git_reference_next": { + "ignore": true + }, + "git_reference_next_name": { + "ignore": true + }, + "git_reference_symbolic_set_target": { + "args": { + "log_message": { + "isOptional": true + } + } + }, + "git_reference_target": { + "return": { + "ownedByThis": true + } + }, + 
"git_reference_target_peel": { + "return": { + "ownedByThis": true + } + } + } + }, + "reference_iterator": { + "cDependencies": [ + "git2/sys/refdb_backend.h" + ], + "needsForwardDeclaration": false, + "ignore": true + }, + "reflog_entry": { + "functions": { + "git_reflog_entry_id_new": { + "return": { + "ownedByThis": true + } + }, + "git_reflog_entry_id_old": { + "return": { + "ownedByThis": true + } + } + } + }, + "refspec": { + "cType": "git_refspec", + "functions": { + "git_refspec_rtransform": { + "ignore": true + }, + "git_refspec_string": { + "ignore": true + }, + "git_refspec_transform": { + "ignore": true + } + } + }, + "remote": { + "cType": "git_remote", + "selfFreeing": true, + "functions": { + "git_remote_create": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_remote_connect": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_remote_disconnect": { + "isAsync": true + }, + "git_remote_download": { + "args": { + "refspecs": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_remote_default_branch": { + "isAsync": true, + "args": { + "out": { + "isReturn": true + }, + "remote": { + "isSelf": true + } + }, + "return": { + "isErrorCode": true + } + }, + "git_remote_delete": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_remote_fetch": { + "args": { + "reflog_message": { + "isOptional": true + }, + "refspecs": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_remote_get_fetch_refspecs": { + "args": { + "array": { + "isReturn": true, + "shouldAlloc": true, + "cppClassName": "Array", + "jsClassName": "Array", + "size": "count", + "key": "strings" + } + }, + "isAsync": true + }, + "git_remote_get_push_refspecs": { + "args": { + "array": { + "isReturn": true, + "shouldAlloc": true, + "cppClassName": "Array", + "jsClassName": "Array", + "size": "count", + "key": "strings" + } + }, + 
"isAsync": true + }, + "git_remote_get_refspec": { + "return": { + "ownedByThis": true + } + }, + "git_remote_list": { + "args": { + "out": { + "isReturn": true, + "shouldAlloc": true, + "cppClassName": "Array", + "jsClassName": "Array", + "size": "count", + "key": "strings" + } + } + }, + "git_remote_ls": { + "ignore": true + }, + "git_remote_rename": { + "ignore": true + }, + "git_remote_push": { + "isAsync": true, + "return": { + "isErrorCode": true + }, + "args": { + "opts": { + "isOptional": true + } + } + }, + "git_remote_set_fetch_refspecs": { + "ignore": true + }, + "git_remote_set_transport": { + "ignore": true + }, + "git_remote_set_push_refspecs": { + "ignore": true + }, + "git_remote_stats": { + "return": { + "ownedByThis": true + } + } + } + }, + "remote_callbacks": { + "fields": { + "completion": { + "ignore": true + }, + "pack_progress": { + "ignore": true + }, + "push_negotiation": { + "ignore": true + }, + "push_transfer_progress": { + "ignore": true + }, + "push_update_reference": { + "ignore": true + }, + "sideband_progress": { + "ignore": true + }, + "update_tips": { + "ignore": true + } + } + }, + "remote_head": { + "ignore": true + }, + "repository": { + "dependencies": [ + "git2/sys/repository.h" + ], + "functions": { + "git_repository_discover": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_repository_init_init_options": { + "ignore": true + }, + "git_repository_fetchhead_foreach": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_repository_hashfile": { + "ignore": true + }, + "git_repository_ident": { + "ignore": true + }, + "git_repository_mergehead_foreach": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_repository_message": { + "ignore": true + }, + "git_repository_new": { + "ignore": true + }, + "git_repository_reinit_filesystem": { + "ignore": true + }, + "git_repository_set_bare": { + "ignore": true + }, + "git_repository_set_config": { + "ignore": true + }, 
+ "git_repository_set_head": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_repository_set_index": { + "args": { + "index": { + "isOptional": true + } + } + }, + "git_repository_set_odb": { + "ignore": true + }, + "git_repository_set_refdb": { + "ignore": true + } + } + }, + "revert": { + "functions": { + "git_revert": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_revert_commit": { + "isAsync": true, + "args": { + "merge_options": { + "isOptional": true + } + } + }, + "git_revert_init_options": { + "ignore": true + } + } + }, + "revparse": { + "functions": { + "git_revparse": { + "ignore": true + } + } + }, + "reset": { + "functions": { + "git_reset": { + "args": { + "checkout_opts": { + "isOptional": true + }, + "log_message": { + "isOptional": true + }, + "signature": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_reset_default": { + "args": { + "target": { + "isOptional": true + } + }, + "isAsync": true, + "return": { + "isErrorCode": true + } + } + } + }, + "revspec": { + "ignore": true + }, + "revwalk": { + "selfFreeing": true, + "dependencies": [ + "../include/commit.h", + "../include/functions/copy.h" + ], + "functions": { + "git_revwalk_add_hide_cb": { + "ignore": true + }, + "git_revwalk_new": { + "isAsync": false + } + } + }, + "signature": { + "dupFunction": "git_signature_dup", + "functions": { + "git_signature_default": { + "isAsync": false + }, + "git_signature_new": { + "isAsync": false + }, + "git_signature_now": { + "isAsync": false + } + } + }, + "smart": { + "functions": { + "git_smart_subtransport_git": { + "ignore": true + }, + "git_smart_subtransport_http": { + "ignore": true + }, + "git_smart_subtransport_ssh": { + "ignore": true + } + } + }, + "smart_subtransport_definition": { + "ignore": true + }, + "stash": { + "functions": { + "git_stash_apply": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_stash_drop": 
{ + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_stash_foreach": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_stash_pop": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_stash_save": { + "isAsync": true, + "return": { + "isErrorCode": true + } + } + } + }, + "status": { + "cDependencies": [ + "git2/sys/diff.h" + ], + "functions": { + "git_status_byindex": { + "isAsync": false + }, + "git_status_file": { + "args": { + "status_flags": { + "isReturn": true + }, + "return": { + "isErrorCode": true + } + } + }, + "git_status_foreach": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_status_foreach_ext": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_status_init_options": { + "ignore": true + } + } + }, + "status_list": { + "functions": { + "git_status_list_new": { + "isAsync": true, + "args": { + "opts": { + "isOptional": true + } + }, + "return": { + "isErrorCode": true + } + } + } + }, + "strarray": { + "dependencies": [ + "../include/str_array_converter.h" + ] + }, + "stream": { + "ignore": true, + "cDependencies": [ + "git2/sys/stream.h" + ] + }, + "submodule": { + "functions": { + "git_submodule_add_to_index": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_submodule_add_finalize": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_submodule_head_id": { + "return": { + "ownedByThis": true + } + }, + "git_submodule_init": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_submodule_foreach": { + "isAsync": true, + "args": { + "callback": { + "type": "git_submodule_cb", + "cType": "git_submodule_cb", + "cppClassName": "git_submodule_cb" + } + }, + "return": { + "isErrorCode": true, + "type": "int" + } + }, + "git_submodule_index_id": { + "return": { + "ownedByThis": true + } + }, + "git_submodule_wd_id": { + "return": { + "ownedByThis": true + } + }, + 
"git_submodule_location": { + "isAsync": true, + "args": { + "location_status": { + "shouldAlloc": true, + "isReturn": true + }, + "submodule": { + "isSelf": true + } + }, + "return": { + "isErrorCode": true + } + }, + "git_submodule_open": { + "isAsync": true, + "args": { + "repo": { + "isReturn": true + }, + "submodule": { + "isSelf": true + } + }, + "return": { + "isErrorCode": true + } + }, + "git_submodule_update": { + "isAsync": true, + "args": { + "options": { + "isOptional": true + } + }, + "return": { + "isErrorCode": true + } + }, + "git_submodule_set_ignore": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_submodule_set_update": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_submodule_set_url": { + "isAsync": true, + "return": { + "isErrorCode": true + } + }, + "git_submodule_status": { + "isAsync": true, + "args": { + "status": { + "isReturn": true, + "shouldAlloc": true + } + }, + "return": { + "isErrorCode": true + } + }, + "git_submodule_sync": { + "isAsync": true, + "return": { + "isErrorCode": true + } + } + } + }, + "tag": { + "selfFreeing": true, + "functions": { + "git_tag_foreach": { + "ignore": true + }, + "git_tag_create": { + "args": { + "oid": { + "isReturn": true + } + }, + "return": { + "isErrorCode": true + }, + "isAsync": true + }, + "git_tag_create_frombuffer": { + "ignore": true + }, + "git_tag_id": { + "return": { + "ownedByThis": true + } + }, + "git_tag_create_lightweight": { + "args": { + "oid": { + "isReturn": true + } + }, + "return": { + "isErrorCode": true + }, + "isAsync": true + }, + "git_tag_annotation_create": { + "args": { + "oid": { + "isReturn": true + } + }, + "return": { + "isErrorCode": true + }, + "isAsync": true + }, + "git_tag_list": { + "args": { + "tag_names": { + "isReturn": true, + "shouldAlloc": true, + "cppClassName": "Array", + "jsClassName": "Array", + "size": "count", + "key": "strings" + } + }, + "isAsync": true + }, + "git_tag_tagger": { + "return": { + 
"ownedByThis": true + } + }, + "git_tag_target": { + "args": { + "target_out": { + "isReturn": true + } + } + }, + "git_tag_target_id": { + "return": { + "ownedByThis": true + } + }, + "git_tag_delete": { + "return": { + "isErrorCode": true + }, + "isAsync": true + } + } + }, + "time": { + "dupFunction": "git_time_dup" + }, + "trace": { + "functions": { + "git_trace_set": { + "ignore": true + } + } + }, + "transfer_progress": { + "dupFunction": "git_transfer_progress_dup" + }, + "transport": { + "cType": "git_transport", + "needsForwardDeclaration": false, + "functions": { + "git_transport_dummy": { + "ignore": true + }, + "git_transport_local": { + "ignore": true + }, + "git_transport_new": { + "ignore": true + }, + "git_transport_register": { + "ignore": true + }, + "git_transport_smart": { + "ignore": true + } + }, + "cDependencies": [ + "git2/sys/transport.h" + ] + }, + "tree": { + "selfFreeing": true, + "functions": { + "git_tree_entry_byid": { + "return": { + "ownedByThis": true + } + }, + "git_tree_entry_byindex": { + "jsFunctionName": "_entryByIndex", + "return": { + "ownedByThis": true + } + }, + "git_tree_entry_byname": { + "jsFunctionName": "_entryByName", + "return": { + "ownedByThis": true + } + }, + "git_tree_entrycount": { + "jsFunctionName": "entryCount" + }, + "git_tree_id": { + "return": { + "ownedByThis": true + } + }, + "git_tree_walk": { + "ignore": true + } + } + }, + "treebuilder": { + "functions": { + "git_treebuilder_filter": { + "ignore": true + }, + "git_treebuilder_write": { + "args": { + "id": { + "isReturn": true + } + } + }, + "git_treebuilder_new": { + "args": { + "source": { + "isOptional": true + } + } + } + } + }, + "tree_entry": { + "selfFreeing": true, + "dupFunction": "git_tree_entry_dup", + "freeFunctionName": "git_tree_entry_free", + "functions": { + "git_tree_entry_id": { + "return": { + "ownedByThis": true + } + } + } + }, + "writestream": { + "cType": "git_writestream", + "needsForwardDeclaration": false + } + } +} diff 
--git a/generate/input/ignored-missing-tests.json b/generate/input/ignored-missing-tests.json new file mode 100644 index 000000000..eac03aba8 --- /dev/null +++ b/generate/input/ignored-missing-tests.json @@ -0,0 +1,34 @@ +{ + "blob": { + "functions": [ + "createFrombuffer", + "isBinary", + "lookup", + "rawcontent", + "rawsize" + ] + }, + "clone": { + "functions": [ + "initOptions" + ] + }, + "commit": { + "functions": [ + "parentCount", + "parentId", + "treeId" + ] + }, + "diff": { + "functions": [ + "getDelta", + "numDeltas" + ] + }, + "object": { + "functions": [ + "type" + ] + } +} diff --git a/generate/input/libgit2-docs.json b/generate/input/libgit2-docs.json new file mode 100644 index 000000000..9357295f6 --- /dev/null +++ b/generate/input/libgit2-docs.json @@ -0,0 +1,36498 @@ +{ + "files": [ + { + "file": "annotated_commit.h", + "functions": [ + "git_annotated_commit_from_ref", + "git_annotated_commit_from_fetchhead", + "git_annotated_commit_lookup", + "git_annotated_commit_from_revspec", + "git_annotated_commit_id", + "git_annotated_commit_free" + ], + "meta": {}, + "lines": 112 + }, + { + "file": "attr.h", + "functions": [ + "git_attr_value", + "git_attr_get", + "git_attr_get_many", + "git_attr_foreach", + "git_attr_cache_flush", + "git_attr_add_macro" + ], + "meta": {}, + "lines": 240 + }, + { + "file": "blame.h", + "functions": [ + "git_blame_init_options", + "git_blame_get_hunk_count", + "git_blame_get_hunk_byindex", + "git_blame_get_hunk_byline", + "git_blame_file", + "git_blame_buffer", + "git_blame_free" + ], + "meta": {}, + "lines": 207 + }, + { + "file": "blob.h", + "functions": [ + "git_blob_lookup", + "git_blob_lookup_prefix", + "git_blob_free", + "git_blob_id", + "git_blob_owner", + "git_blob_rawcontent", + "git_blob_rawsize", + "git_blob_filtered_content", + "git_blob_create_fromworkdir", + "git_blob_create_fromdisk", + "git_blob_create_fromchunks", + "git_blob_create_fromstream", + "git_blob_create_fromstream_commit", + 
"git_blob_create_frombuffer", + "git_blob_is_binary", + "git_blob_dup" + ], + "meta": {}, + "lines": 269 + }, + { + "file": "branch.h", + "functions": [ + "git_branch_create", + "git_branch_create_from_annotated", + "git_branch_delete", + "git_branch_iterator_new", + "git_branch_next", + "git_branch_iterator_free", + "git_branch_move", + "git_branch_lookup", + "git_branch_name", + "git_branch_upstream", + "git_branch_set_upstream", + "git_branch_is_head" + ], + "meta": {}, + "lines": 246 + }, + { + "file": "buffer.h", + "functions": [ + "git_buf_free", + "git_buf_grow", + "git_buf_set", + "git_buf_is_binary", + "git_buf_contains_nul" + ], + "meta": {}, + "lines": 122 + }, + { + "file": "checkout.h", + "functions": [ + "git_checkout_notify_cb", + "git_checkout_progress_cb", + "git_checkout_perfdata_cb", + "git_checkout_init_options", + "git_checkout_head", + "git_checkout_index", + "git_checkout_tree" + ], + "meta": {}, + "lines": 354 + }, + { + "file": "cherrypick.h", + "functions": [ + "git_cherrypick_init_options", + "git_cherrypick_commit", + "git_cherrypick" + ], + "meta": {}, + "lines": 84 + }, + { + "file": "clone.h", + "functions": [ + "git_remote_create_cb", + "git_repository_create_cb", + "git_clone_init_options", + "git_clone" + ], + "meta": {}, + "lines": 203 + }, + { + "file": "commit.h", + "functions": [ + "git_commit_lookup", + "git_commit_lookup_prefix", + "git_commit_free", + "git_commit_id", + "git_commit_owner", + "git_commit_message_encoding", + "git_commit_message", + "git_commit_message_raw", + "git_commit_summary", + "git_commit_body", + "git_commit_time", + "git_commit_time_offset", + "git_commit_committer", + "git_commit_author", + "git_commit_raw_header", + "git_commit_tree", + "git_commit_tree_id", + "git_commit_parentcount", + "git_commit_parent", + "git_commit_parent_id", + "git_commit_nth_gen_ancestor", + "git_commit_header_field", + "git_commit_extract_signature", + "git_commit_create", + "git_commit_create_v", + "git_commit_amend", + 
"git_commit_create_buffer", + "git_commit_create_with_signature", + "git_commit_dup" + ], + "meta": {}, + "lines": 471 + }, + { + "file": "common.h", + "functions": [ + "git_libgit2_version", + "git_libgit2_features", + "git_libgit2_opts" + ], + "meta": {}, + "lines": 282 + }, + { + "file": "config.h", + "functions": [ + "git_config_entry_free", + "git_config_find_global", + "git_config_find_xdg", + "git_config_find_system", + "git_config_find_programdata", + "git_config_open_default", + "git_config_new", + "git_config_add_file_ondisk", + "git_config_open_ondisk", + "git_config_open_level", + "git_config_open_global", + "git_config_snapshot", + "git_config_free", + "git_config_get_entry", + "git_config_get_int32", + "git_config_get_int64", + "git_config_get_bool", + "git_config_get_path", + "git_config_get_string", + "git_config_get_string_buf", + "git_config_get_multivar_foreach", + "git_config_multivar_iterator_new", + "git_config_next", + "git_config_iterator_free", + "git_config_set_int32", + "git_config_set_int64", + "git_config_set_bool", + "git_config_set_string", + "git_config_set_multivar", + "git_config_delete_entry", + "git_config_delete_multivar", + "git_config_foreach", + "git_config_iterator_new", + "git_config_iterator_glob_new", + "git_config_foreach_match", + "git_config_get_mapped", + "git_config_lookup_map_value", + "git_config_parse_bool", + "git_config_parse_int32", + "git_config_parse_int64", + "git_config_parse_path", + "git_config_backend_foreach_match", + "git_config_lock" + ], + "meta": {}, + "lines": 724 + }, + { + "file": "cred_helpers.h", + "functions": [ + "git_cred_userpass" + ], + "meta": {}, + "lines": 48 + }, + { + "file": "describe.h", + "functions": [ + "git_describe_commit", + "git_describe_workdir", + "git_describe_format", + "git_describe_result_free" + ], + "meta": {}, + "lines": 158 + }, + { + "file": "diff.h", + "functions": [ + "git_diff_notify_cb", + "git_diff_progress_cb", + "git_diff_init_options", + "git_diff_file_cb", 
+ "git_diff_binary_cb", + "git_diff_hunk_cb", + "git_diff_line_cb", + "git_diff_find_init_options", + "git_diff_free", + "git_diff_tree_to_tree", + "git_diff_tree_to_index", + "git_diff_index_to_workdir", + "git_diff_tree_to_workdir", + "git_diff_tree_to_workdir_with_index", + "git_diff_index_to_index", + "git_diff_merge", + "git_diff_find_similar", + "git_diff_num_deltas", + "git_diff_num_deltas_of_type", + "git_diff_get_delta", + "git_diff_is_sorted_icase", + "git_diff_foreach", + "git_diff_status_char", + "git_diff_print", + "git_diff_blobs", + "git_diff_blob_to_buffer", + "git_diff_buffers", + "git_diff_get_stats", + "git_diff_stats_files_changed", + "git_diff_stats_insertions", + "git_diff_stats_deletions", + "git_diff_stats_to_buf", + "git_diff_stats_free", + "git_diff_format_email", + "git_diff_commit_as_email", + "git_diff_format_email_init_options" + ], + "meta": {}, + "lines": 1346 + }, + { + "file": "errors.h", + "functions": [ + "giterr_last", + "giterr_clear", + "giterr_set_str", + "giterr_set_oom" + ], + "meta": {}, + "lines": 144 + }, + { + "file": "filter.h", + "functions": [ + "git_filter_list_load", + "git_filter_list_contains", + "git_filter_list_apply_to_data", + "git_filter_list_apply_to_file", + "git_filter_list_apply_to_blob", + "git_filter_list_stream_data", + "git_filter_list_stream_file", + "git_filter_list_stream_blob", + "git_filter_list_free" + ], + "meta": {}, + "lines": 210 + }, + { + "file": "global.h", + "functions": [ + "git_libgit2_init", + "git_libgit2_shutdown" + ], + "meta": {}, + "lines": 39 + }, + { + "file": "graph.h", + "functions": [ + "git_graph_ahead_behind", + "git_graph_descendant_of" + ], + "meta": {}, + "lines": 51 + }, + { + "file": "ignore.h", + "functions": [ + "git_ignore_add_rule", + "git_ignore_clear_internal_rules", + "git_ignore_path_is_ignored" + ], + "meta": {}, + "lines": 74 + }, + { + "file": "index.h", + "functions": [ + "git_index_matched_path_cb", + "git_index_open", + "git_index_new", + 
"git_index_free", + "git_index_owner", + "git_index_caps", + "git_index_set_caps", + "git_index_read", + "git_index_write", + "git_index_path", + "git_index_checksum", + "git_index_read_tree", + "git_index_write_tree", + "git_index_write_tree_to", + "git_index_entrycount", + "git_index_clear", + "git_index_get_byindex", + "git_index_get_bypath", + "git_index_remove", + "git_index_remove_directory", + "git_index_add", + "git_index_entry_stage", + "git_index_entry_is_conflict", + "git_index_add_bypath", + "git_index_add_frombuffer", + "git_index_remove_bypath", + "git_index_add_all", + "git_index_remove_all", + "git_index_update_all", + "git_index_find", + "git_index_find_prefix", + "git_index_conflict_add", + "git_index_conflict_get", + "git_index_conflict_remove", + "git_index_conflict_cleanup", + "git_index_has_conflicts", + "git_index_conflict_iterator_new", + "git_index_conflict_next", + "git_index_conflict_iterator_free" + ], + "meta": {}, + "lines": 780 + }, + { + "file": "indexer.h", + "functions": [ + "git_indexer_new", + "git_indexer_append", + "git_indexer_commit", + "git_indexer_hash", + "git_indexer_free" + ], + "meta": {}, + "lines": 72 + }, + { + "file": "merge.h", + "functions": [ + "git_merge_file_init_input", + "git_merge_file_init_options", + "git_merge_init_options", + "git_merge_analysis", + "git_merge_base", + "git_merge_bases", + "git_merge_base_many", + "git_merge_bases_many", + "git_merge_base_octopus", + "git_merge_file", + "git_merge_file_from_index", + "git_merge_file_result_free", + "git_merge_trees", + "git_merge_commits", + "git_merge" + ], + "meta": {}, + "lines": 578 + }, + { + "file": "message.h", + "functions": [ + "git_message_prettify" + ], + "meta": {}, + "lines": 39 + }, + { + "file": "net.h", + "functions": [ + "git_headlist_cb" + ], + "meta": {}, + "lines": 55 + }, + { + "file": "notes.h", + "functions": [ + "git_note_foreach_cb", + "git_note_iterator_new", + "git_note_iterator_free", + "git_note_next", + "git_note_read", + 
"git_note_author", + "git_note_committer", + "git_note_message", + "git_note_id", + "git_note_create", + "git_note_remove", + "git_note_free", + "git_note_foreach" + ], + "meta": {}, + "lines": 213 + }, + { + "file": "object.h", + "functions": [ + "git_object_lookup", + "git_object_lookup_prefix", + "git_object_lookup_bypath", + "git_object_id", + "git_object_short_id", + "git_object_type", + "git_object_owner", + "git_object_free", + "git_object_type2string", + "git_object_string2type", + "git_object_typeisloose", + "git_object__size", + "git_object_peel", + "git_object_dup" + ], + "meta": {}, + "lines": 237 + }, + { + "file": "odb.h", + "functions": [ + "git_odb_foreach_cb", + "git_odb_new", + "git_odb_open", + "git_odb_add_disk_alternate", + "git_odb_free", + "git_odb_read", + "git_odb_read_prefix", + "git_odb_read_header", + "git_odb_exists", + "git_odb_exists_prefix", + "git_odb_expand_ids", + "git_odb_refresh", + "git_odb_foreach", + "git_odb_write", + "git_odb_open_wstream", + "git_odb_stream_write", + "git_odb_stream_finalize_write", + "git_odb_stream_read", + "git_odb_stream_free", + "git_odb_open_rstream", + "git_odb_write_pack", + "git_odb_hash", + "git_odb_hashfile", + "git_odb_object_dup", + "git_odb_object_free", + "git_odb_object_id", + "git_odb_object_data", + "git_odb_object_size", + "git_odb_object_type", + "git_odb_add_backend", + "git_odb_add_alternate", + "git_odb_num_backends", + "git_odb_get_backend" + ], + "meta": {}, + "lines": 537 + }, + { + "file": "odb_backend.h", + "functions": [ + "git_odb_backend_pack", + "git_odb_backend_loose", + "git_odb_backend_one_pack" + ], + "meta": {}, + "lines": 130 + }, + { + "file": "oid.h", + "functions": [ + "git_oid_fromstr", + "git_oid_fromstrp", + "git_oid_fromstrn", + "git_oid_fromraw", + "git_oid_fmt", + "git_oid_nfmt", + "git_oid_pathfmt", + "git_oid_tostr_s", + "git_oid_tostr", + "git_oid_cpy", + "git_oid_cmp", + "git_oid_equal", + "git_oid_ncmp", + "git_oid_streq", + "git_oid_strcmp", + 
"git_oid_iszero", + "git_oid_shorten_new", + "git_oid_shorten_add", + "git_oid_shorten_free" + ], + "meta": {}, + "lines": 265 + }, + { + "file": "oidarray.h", + "functions": [ + "git_oidarray_free" + ], + "meta": {}, + "lines": 34 + }, + { + "file": "pack.h", + "functions": [ + "git_packbuilder_new", + "git_packbuilder_set_threads", + "git_packbuilder_insert", + "git_packbuilder_insert_tree", + "git_packbuilder_insert_commit", + "git_packbuilder_insert_walk", + "git_packbuilder_insert_recur", + "git_packbuilder_write", + "git_packbuilder_hash", + "git_packbuilder_foreach", + "git_packbuilder_object_count", + "git_packbuilder_written", + "git_packbuilder_progress", + "git_packbuilder_set_callbacks", + "git_packbuilder_free" + ], + "meta": {}, + "lines": 236 + }, + { + "file": "patch.h", + "functions": [ + "git_patch_from_diff", + "git_patch_from_blobs", + "git_patch_from_blob_and_buffer", + "git_patch_from_buffers", + "git_patch_free", + "git_patch_get_delta", + "git_patch_num_hunks", + "git_patch_line_stats", + "git_patch_get_hunk", + "git_patch_num_lines_in_hunk", + "git_patch_get_line_in_hunk", + "git_patch_size", + "git_patch_print", + "git_patch_to_buf" + ], + "meta": {}, + "lines": 268 + }, + { + "file": "pathspec.h", + "functions": [ + "git_pathspec_new", + "git_pathspec_free", + "git_pathspec_matches_path", + "git_pathspec_match_workdir", + "git_pathspec_match_index", + "git_pathspec_match_tree", + "git_pathspec_match_diff", + "git_pathspec_match_list_free", + "git_pathspec_match_list_entrycount", + "git_pathspec_match_list_entry", + "git_pathspec_match_list_diff_entry", + "git_pathspec_match_list_failed_entrycount", + "git_pathspec_match_list_failed_entry" + ], + "meta": {}, + "lines": 260 + }, + { + "file": "proxy.h", + "functions": [ + "git_proxy_init_options" + ], + "meta": {}, + "lines": 88 + }, + { + "file": "rebase.h", + "functions": [ + "git_rebase_init_options", + "git_rebase_init", + "git_rebase_open", + "git_rebase_operation_entrycount", + 
"git_rebase_operation_current", + "git_rebase_operation_byindex", + "git_rebase_next", + "git_rebase_inmemory_index", + "git_rebase_commit", + "git_rebase_abort", + "git_rebase_finish", + "git_rebase_free" + ], + "meta": {}, + "lines": 316 + }, + { + "file": "refdb.h", + "functions": [ + "git_refdb_new", + "git_refdb_open", + "git_refdb_compress", + "git_refdb_free" + ], + "meta": {}, + "lines": 63 + }, + { + "file": "reflog.h", + "functions": [ + "git_reflog_read", + "git_reflog_write", + "git_reflog_append", + "git_reflog_rename", + "git_reflog_delete", + "git_reflog_entrycount", + "git_reflog_entry_byindex", + "git_reflog_drop", + "git_reflog_entry_id_old", + "git_reflog_entry_id_new", + "git_reflog_entry_committer", + "git_reflog_entry_message", + "git_reflog_free" + ], + "meta": {}, + "lines": 166 + }, + { + "file": "refs.h", + "functions": [ + "git_reference_lookup", + "git_reference_name_to_id", + "git_reference_dwim", + "git_reference_symbolic_create_matching", + "git_reference_symbolic_create", + "git_reference_create", + "git_reference_create_matching", + "git_reference_target", + "git_reference_target_peel", + "git_reference_symbolic_target", + "git_reference_type", + "git_reference_name", + "git_reference_resolve", + "git_reference_owner", + "git_reference_symbolic_set_target", + "git_reference_set_target", + "git_reference_rename", + "git_reference_delete", + "git_reference_remove", + "git_reference_list", + "git_reference_foreach", + "git_reference_foreach_name", + "git_reference_free", + "git_reference_cmp", + "git_reference_iterator_new", + "git_reference_iterator_glob_new", + "git_reference_next", + "git_reference_next_name", + "git_reference_iterator_free", + "git_reference_foreach_glob", + "git_reference_has_log", + "git_reference_ensure_log", + "git_reference_is_branch", + "git_reference_is_remote", + "git_reference_is_tag", + "git_reference_is_note", + "git_reference_normalize_name", + "git_reference_peel", + "git_reference_is_valid_name", + 
"git_reference_shorthand" + ], + "meta": {}, + "lines": 730 + }, + { + "file": "refspec.h", + "functions": [ + "git_refspec_src", + "git_refspec_dst", + "git_refspec_string", + "git_refspec_force", + "git_refspec_direction", + "git_refspec_src_matches", + "git_refspec_dst_matches", + "git_refspec_transform", + "git_refspec_rtransform" + ], + "meta": {}, + "lines": 100 + }, + { + "file": "remote.h", + "functions": [ + "git_remote_rename_problem_cb", + "git_remote_create", + "git_remote_create_with_fetchspec", + "git_remote_create_anonymous", + "git_remote_lookup", + "git_remote_dup", + "git_remote_owner", + "git_remote_name", + "git_remote_url", + "git_remote_pushurl", + "git_remote_set_url", + "git_remote_set_pushurl", + "git_remote_add_fetch", + "git_remote_get_fetch_refspecs", + "git_remote_add_push", + "git_remote_get_push_refspecs", + "git_remote_refspec_count", + "git_remote_get_refspec", + "git_remote_connect", + "git_remote_ls", + "git_remote_connected", + "git_remote_stop", + "git_remote_disconnect", + "git_remote_free", + "git_remote_list", + "git_push_transfer_progress", + "git_push_negotiation", + "git_remote_init_callbacks", + "git_fetch_init_options", + "git_push_init_options", + "git_remote_download", + "git_remote_upload", + "git_remote_update_tips", + "git_remote_fetch", + "git_remote_prune", + "git_remote_push", + "git_remote_stats", + "git_remote_autotag", + "git_remote_set_autotag", + "git_remote_prune_refs", + "git_remote_rename", + "git_remote_is_valid_name", + "git_remote_delete", + "git_remote_default_branch" + ], + "meta": {}, + "lines": 820 + }, + { + "file": "repository.h", + "functions": [ + "git_repository_open", + "git_repository_wrap_odb", + "git_repository_discover", + "git_repository_open_ext", + "git_repository_open_bare", + "git_repository_free", + "git_repository_init", + "git_repository_init_init_options", + "git_repository_init_ext", + "git_repository_head", + "git_repository_head_detached", + "git_repository_head_unborn", + 
"git_repository_is_empty", + "git_repository_path", + "git_repository_workdir", + "git_repository_set_workdir", + "git_repository_is_bare", + "git_repository_config", + "git_repository_config_snapshot", + "git_repository_odb", + "git_repository_refdb", + "git_repository_index", + "git_repository_message", + "git_repository_message_remove", + "git_repository_state_cleanup", + "git_repository_fetchhead_foreach", + "git_repository_mergehead_foreach", + "git_repository_hashfile", + "git_repository_set_head", + "git_repository_set_head_detached", + "git_repository_set_head_detached_from_annotated", + "git_repository_detach_head", + "git_repository_state", + "git_repository_set_namespace", + "git_repository_get_namespace", + "git_repository_is_shallow", + "git_repository_ident", + "git_repository_set_ident" + ], + "meta": {}, + "lines": 752 + }, + { + "file": "reset.h", + "functions": [ + "git_reset", + "git_reset_from_annotated", + "git_reset_default" + ], + "meta": {}, + "lines": 107 + }, + { + "file": "revert.h", + "functions": [ + "git_revert_init_options", + "git_revert_commit", + "git_revert" + ], + "meta": {}, + "lines": 84 + }, + { + "file": "revparse.h", + "functions": [ + "git_revparse_single", + "git_revparse_ext", + "git_revparse" + ], + "meta": {}, + "lines": 108 + }, + { + "file": "revwalk.h", + "functions": [ + "git_revwalk_new", + "git_revwalk_reset", + "git_revwalk_push", + "git_revwalk_push_glob", + "git_revwalk_push_head", + "git_revwalk_hide", + "git_revwalk_hide_glob", + "git_revwalk_hide_head", + "git_revwalk_push_ref", + "git_revwalk_hide_ref", + "git_revwalk_next", + "git_revwalk_sorting", + "git_revwalk_push_range", + "git_revwalk_simplify_first_parent", + "git_revwalk_free", + "git_revwalk_repository", + "git_revwalk_hide_cb", + "git_revwalk_add_hide_cb" + ], + "meta": {}, + "lines": 293 + }, + { + "file": "signature.h", + "functions": [ + "git_signature_new", + "git_signature_now", + "git_signature_default", + "git_signature_dup", + 
"git_signature_free" + ], + "meta": {}, + "lines": 86 + }, + { + "file": "stash.h", + "functions": [ + "git_stash_apply_progress_cb", + "git_stash_apply_init_options", + "git_stash_apply", + "git_stash_cb", + "git_stash_foreach", + "git_stash_drop", + "git_stash_pop" + ], + "meta": {}, + "lines": 253 + }, + { + "file": "status.h", + "functions": [ + "git_status_cb", + "git_status_init_options", + "git_status_foreach", + "git_status_foreach_ext", + "git_status_file", + "git_status_list_new", + "git_status_list_entrycount", + "git_status_byindex", + "git_status_list_free", + "git_status_should_ignore" + ], + "meta": {}, + "lines": 366 + }, + { + "file": "strarray.h", + "functions": [ + "git_strarray_free", + "git_strarray_copy" + ], + "meta": {}, + "lines": 53 + }, + { + "file": "submodule.h", + "functions": [ + "git_submodule_cb", + "git_submodule_update_init_options", + "git_submodule_update", + "git_submodule_lookup", + "git_submodule_free", + "git_submodule_foreach", + "git_submodule_add_setup", + "git_submodule_add_finalize", + "git_submodule_add_to_index", + "git_submodule_owner", + "git_submodule_name", + "git_submodule_path", + "git_submodule_url", + "git_submodule_resolve_url", + "git_submodule_branch", + "git_submodule_set_branch", + "git_submodule_set_url", + "git_submodule_index_id", + "git_submodule_head_id", + "git_submodule_wd_id", + "git_submodule_ignore", + "git_submodule_set_ignore", + "git_submodule_update_strategy", + "git_submodule_set_update", + "git_submodule_fetch_recurse_submodules", + "git_submodule_set_fetch_recurse_submodules", + "git_submodule_init", + "git_submodule_repo_init", + "git_submodule_sync", + "git_submodule_open", + "git_submodule_reload", + "git_submodule_status", + "git_submodule_location" + ], + "meta": {}, + "lines": 633 + }, + { + "file": "sys/commit.h", + "functions": [ + "git_commit_create_from_callback" + ], + "meta": {}, + "lines": 76 + }, + { + "file": "sys/config.h", + "functions": [ + "git_config_init_backend", + 
"git_config_add_backend" + ], + "meta": {}, + "lines": 123 + }, + { + "file": "sys/diff.h", + "functions": [ + "git_diff_print_callback__to_buf", + "git_diff_print_callback__to_file_handle", + "git_diff_get_perfdata", + "git_status_list_get_perfdata" + ], + "meta": {}, + "lines": 90 + }, + { + "file": "sys/filter.h", + "functions": [ + "git_filter_lookup", + "git_filter_list_new", + "git_filter_list_push", + "git_filter_source_repo", + "git_filter_source_path", + "git_filter_source_filemode", + "git_filter_source_id", + "git_filter_source_mode", + "git_filter_source_flags", + "git_filter_init_fn", + "git_filter_shutdown_fn", + "git_filter_check_fn", + "git_filter_apply_fn", + "git_filter_cleanup_fn", + "git_filter_register", + "git_filter_unregister" + ], + "meta": {}, + "lines": 317 + }, + { + "file": "sys/hashsig.h", + "functions": [ + "git_hashsig_create_fromfile", + "git_hashsig_free", + "git_hashsig_compare" + ], + "meta": {}, + "lines": 102 + }, + { + "file": "sys/mempack.h", + "functions": [ + "git_mempack_new", + "git_mempack_reset" + ], + "meta": {}, + "lines": 81 + }, + { + "file": "sys/merge.h", + "functions": [ + "git_merge_driver_init_fn", + "git_merge_driver_shutdown_fn", + "git_merge_driver_apply_fn" + ], + "meta": {}, + "lines": 135 + }, + { + "file": "sys/odb_backend.h", + "functions": [ + "git_odb_init_backend" + ], + "meta": {}, + "lines": 106 + }, + { + "file": "sys/openssl.h", + "functions": [ + "git_openssl_set_locking" + ], + "meta": {}, + "lines": 34 + }, + { + "file": "sys/refdb_backend.h", + "functions": [ + "git_refdb_init_backend", + "git_refdb_backend_fs", + "git_refdb_set_backend" + ], + "meta": {}, + "lines": 214 + }, + { + "file": "sys/refs.h", + "functions": [ + "git_reference__alloc", + "git_reference__alloc_symbolic" + ], + "meta": {}, + "lines": 45 + }, + { + "file": "sys/repository.h", + "functions": [ + "git_repository_new", + "git_repository__cleanup", + "git_repository_reinit_filesystem", + "git_repository_set_config", + 
"git_repository_set_odb", + "git_repository_set_refdb", + "git_repository_set_index", + "git_repository_set_bare" + ], + "meta": {}, + "lines": 136 + }, + { + "file": "sys/stream.h", + "functions": [ + "git_stream_register_tls" + ], + "meta": {}, + "lines": 54 + }, + { + "file": "sys/transport.h", + "functions": [ + "git_transport_init", + "git_transport_new", + "git_transport_ssh_with_paths", + "git_transport_unregister", + "git_transport_dummy", + "git_transport_local", + "git_transport_smart", + "git_transport_smart_certificate_check", + "git_smart_subtransport_http", + "git_smart_subtransport_git", + "git_smart_subtransport_ssh" + ], + "meta": {}, + "lines": 379 + }, + { + "file": "tag.h", + "functions": [ + "git_tag_lookup", + "git_tag_lookup_prefix", + "git_tag_free", + "git_tag_id", + "git_tag_owner", + "git_tag_target", + "git_tag_target_id", + "git_tag_target_type", + "git_tag_name", + "git_tag_tagger", + "git_tag_message", + "git_tag_create", + "git_tag_annotation_create", + "git_tag_create_frombuffer", + "git_tag_create_lightweight", + "git_tag_delete", + "git_tag_list", + "git_tag_list_match", + "git_tag_foreach", + "git_tag_peel", + "git_tag_dup" + ], + "meta": {}, + "lines": 357 + }, + { + "file": "trace.h", + "functions": [ + "git_trace_callback", + "git_trace_set" + ], + "meta": {}, + "lines": 63 + }, + { + "file": "transport.h", + "functions": [ + "git_transport_cb", + "git_cred_has_username", + "git_cred_userpass_plaintext_new", + "git_cred_ssh_key_new", + "git_cred_ssh_interactive_new", + "git_cred_ssh_key_from_agent", + "git_cred_ssh_custom_new", + "git_cred_default_new", + "git_cred_username_new", + "git_cred_ssh_key_memory_new", + "git_cred_free", + "git_cred_acquire_cb" + ], + "meta": {}, + "lines": 338 + }, + { + "file": "tree.h", + "functions": [ + "git_tree_lookup", + "git_tree_lookup_prefix", + "git_tree_free", + "git_tree_id", + "git_tree_owner", + "git_tree_entrycount", + "git_tree_entry_byname", + "git_tree_entry_byindex", + 
"git_tree_entry_byid", + "git_tree_entry_bypath", + "git_tree_entry_dup", + "git_tree_entry_free", + "git_tree_entry_name", + "git_tree_entry_id", + "git_tree_entry_type", + "git_tree_entry_filemode", + "git_tree_entry_filemode_raw", + "git_tree_entry_cmp", + "git_tree_entry_to_object", + "git_treebuilder_new", + "git_treebuilder_clear", + "git_treebuilder_entrycount", + "git_treebuilder_free", + "git_treebuilder_get", + "git_treebuilder_insert", + "git_treebuilder_remove", + "git_treebuilder_filter_cb", + "git_treebuilder_filter", + "git_treebuilder_write", + "git_treewalk_cb", + "git_tree_walk", + "git_tree_dup" + ], + "meta": {}, + "lines": 419 + }, + { + "file": "types.h", + "functions": [ + "git_transfer_progress_cb", + "git_transport_message_cb", + "git_transport_certificate_check_cb" + ], + "meta": {}, + "lines": 425 + } + ], + "functions": { + "git_annotated_commit_from_ref": { + "type": "function", + "file": "annotated_commit.h", + "line": 33, + "lineto": 36, + "args": [ + { + "name": "out", + "type": "git_annotated_commit **", + "comment": "pointer to store the git_annotated_commit result in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository that contains the given reference" + }, + { + "name": "ref", + "type": "const git_reference *", + "comment": "reference to use to lookup the git_annotated_commit" + } + ], + "argline": "git_annotated_commit **out, git_repository *repo, const git_reference *ref", + "sig": "git_annotated_commit **::git_repository *::const git_reference *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Creates a git_annotated_commit from the given reference.\n The resulting git_annotated_commit must be freed with\n git_annotated_commit_free.

\n", + "comments": "", + "group": "annotated" + }, + "git_annotated_commit_from_fetchhead": { + "type": "function", + "file": "annotated_commit.h", + "line": 50, + "lineto": 55, + "args": [ + { + "name": "out", + "type": "git_annotated_commit **", + "comment": "pointer to store the git_annotated_commit result in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository that contains the given commit" + }, + { + "name": "branch_name", + "type": "const char *", + "comment": "name of the (remote) branch" + }, + { + "name": "remote_url", + "type": "const char *", + "comment": "url of the remote" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "the commit object id of the remote branch" + } + ], + "argline": "git_annotated_commit **out, git_repository *repo, const char *branch_name, const char *remote_url, const git_oid *id", + "sig": "git_annotated_commit **::git_repository *::const char *::const char *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Creates a git_annotated_commit from the given fetch head data.\n The resulting git_annotated_commit must be freed with\n git_annotated_commit_free.

\n", + "comments": "", + "group": "annotated" + }, + "git_annotated_commit_lookup": { + "type": "function", + "file": "annotated_commit.h", + "line": 75, + "lineto": 78, + "args": [ + { + "name": "out", + "type": "git_annotated_commit **", + "comment": "pointer to store the git_annotated_commit result in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository that contains the given commit" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "the commit object id to lookup" + } + ], + "argline": "git_annotated_commit **out, git_repository *repo, const git_oid *id", + "sig": "git_annotated_commit **::git_repository *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Creates a git_annotated_commit from the given commit id.\n The resulting git_annotated_commit must be freed with\n git_annotated_commit_free.

\n", + "comments": "

An annotated commit contains information about how it was looked up, which may be useful for functions like merge or rebase to provide context to the operation. For example, conflict files will include the name of the source or target branches being merged. It is therefore preferable to use the most specific function (eg git_annotated_commit_from_ref) instead of this one when that data is known.

\n", + "group": "annotated" + }, + "git_annotated_commit_from_revspec": { + "type": "function", + "file": "annotated_commit.h", + "line": 92, + "lineto": 95, + "args": [ + { + "name": "out", + "type": "git_annotated_commit **", + "comment": "pointer to store the git_annotated_commit result in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository that contains the given commit" + }, + { + "name": "revspec", + "type": "const char *", + "comment": "the extended sha syntax string to use to lookup the commit" + } + ], + "argline": "git_annotated_commit **out, git_repository *repo, const char *revspec", + "sig": "git_annotated_commit **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Creates a git_annotated_commit from a revision string.

\n", + "comments": "

See man gitrevisions, or http://git-scm.com/docs/git-rev-parse.html#_specifying_revisions for information on the syntax accepted.

\n", + "group": "annotated" + }, + "git_annotated_commit_id": { + "type": "function", + "file": "annotated_commit.h", + "line": 103, + "lineto": 104, + "args": [ + { + "name": "commit", + "type": "const git_annotated_commit *", + "comment": "the given annotated commit" + } + ], + "argline": "const git_annotated_commit *commit", + "sig": "const git_annotated_commit *", + "return": { + "type": "const git_oid *", + "comment": " commit id" + }, + "description": "

Gets the commit ID that the given git_annotated_commit refers to.

\n", + "comments": "", + "group": "annotated" + }, + "git_annotated_commit_free": { + "type": "function", + "file": "annotated_commit.h", + "line": 111, + "lineto": 112, + "args": [ + { + "name": "commit", + "type": "git_annotated_commit *", + "comment": "annotated commit to free" + } + ], + "argline": "git_annotated_commit *commit", + "sig": "git_annotated_commit *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Frees a git_annotated_commit.

\n", + "comments": "", + "group": "annotated" + }, + "git_attr_value": { + "type": "function", + "file": "attr.h", + "line": 102, + "lineto": 102, + "args": [ + { + "name": "attr", + "type": "const char *", + "comment": "The attribute" + } + ], + "argline": "const char *attr", + "sig": "const char *", + "return": { + "type": "git_attr_t", + "comment": " the value type for the attribute" + }, + "description": "

Return the value type for a given attribute.

\n", + "comments": "

This can be either TRUE, FALSE, UNSPECIFIED (if the attribute was not set at all), or VALUE, if the attribute was set to an actual string.

\n\n

If the attribute has a VALUE string, it can be accessed normally as a NULL-terminated C string.

\n", + "group": "attr" + }, + "git_attr_get": { + "type": "function", + "file": "attr.h", + "line": 145, + "lineto": 150, + "args": [ + { + "name": "value_out", + "type": "const char **", + "comment": "Output of the value of the attribute. Use the GIT_ATTR_...\n macros to test for TRUE, FALSE, UNSPECIFIED, etc. or just\n use the string value for attributes set to a value. You\n should NOT modify or free this value." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository containing the path." + }, + { + "name": "flags", + "type": "uint32_t", + "comment": "A combination of GIT_ATTR_CHECK... flags." + }, + { + "name": "path", + "type": "const char *", + "comment": "The path to check for attributes. Relative paths are\n interpreted relative to the repo root. The file does\n not have to exist, but if it does not, then it will be\n treated as a plain file (not a directory)." + }, + { + "name": "name", + "type": "const char *", + "comment": "The name of the attribute to look up." + } + ], + "argline": "const char **value_out, git_repository *repo, uint32_t flags, const char *path, const char *name", + "sig": "const char **::git_repository *::uint32_t::const char *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Look up the value of one git attribute for path.

\n", + "comments": "", + "group": "attr" + }, + "git_attr_get_many": { + "type": "function", + "file": "attr.h", + "line": 181, + "lineto": 187, + "args": [ + { + "name": "values_out", + "type": "const char **", + "comment": "An array of num_attr entries that will have string\n pointers written into it for the values of the attributes.\n You should not modify or free the values that are written\n into this array (although of course, you should free the\n array itself if you allocated it)." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository containing the path." + }, + { + "name": "flags", + "type": "uint32_t", + "comment": "A combination of GIT_ATTR_CHECK... flags." + }, + { + "name": "path", + "type": "const char *", + "comment": "The path inside the repo to check attributes. This\n does not have to exist, but if it does not, then\n it will be treated as a plain file (i.e. not a directory)." + }, + { + "name": "num_attr", + "type": "size_t", + "comment": "The number of attributes being looked up" + }, + { + "name": "names", + "type": "const char **", + "comment": "An array of num_attr strings containing attribute names." + } + ], + "argline": "const char **values_out, git_repository *repo, uint32_t flags, const char *path, size_t num_attr, const char **names", + "sig": "const char **::git_repository *::uint32_t::const char *::size_t::const char **", + "return": { + "type": "int", + "comment": null + }, + "description": "

Look up a list of git attributes for path.

\n", + "comments": "

Use this if you have a known list of attributes that you want to look up in a single call. This is somewhat more efficient than calling git_attr_get() multiple times.

\n\n

For example, you might write:

\n\n
 const char *attrs[] = { "crlf", "diff", "foo" };     const char **values[3];     git_attr_get_many(values, repo, 0, "my/fun/file.c", 3, attrs);\n
\n\n

Then you could loop through the 3 values to get the settings for the three attributes you asked about.

\n", + "group": "attr" + }, + "git_attr_foreach": { + "type": "function", + "file": "attr.h", + "line": 209, + "lineto": 214, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository containing the path." + }, + { + "name": "flags", + "type": "uint32_t", + "comment": "A combination of GIT_ATTR_CHECK... flags." + }, + { + "name": "path", + "type": "const char *", + "comment": "Path inside the repo to check attributes. This does not have\n to exist, but if it does not, then it will be treated as a\n plain file (i.e. not a directory)." + }, + { + "name": "callback", + "type": "git_attr_foreach_cb", + "comment": "Function to invoke on each attribute name and value. The\n value may be NULL is the attribute is explicitly set to\n UNSPECIFIED using the '!' sign. Callback will be invoked\n only once per attribute name, even if there are multiple\n rules for a given file. The highest priority rule will be\n used. Return a non-zero value from this to stop looping.\n The value will be returned from `git_attr_foreach`." + }, + { + "name": "payload", + "type": "void *", + "comment": "Passed on as extra parameter to callback function." + } + ], + "argline": "git_repository *repo, uint32_t flags, const char *path, git_attr_foreach_cb callback, void *payload", + "sig": "git_repository *::uint32_t::const char *::git_attr_foreach_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Loop over all the git attributes for a path.

\n", + "comments": "", + "group": "attr" + }, + "git_attr_cache_flush": { + "type": "function", + "file": "attr.h", + "line": 224, + "lineto": 225, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": null + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Flush the gitattributes cache.

\n", + "comments": "

Call this if you have reason to believe that the attributes files on disk no longer match the cached contents of memory. This will cause the attributes files to be reloaded the next time that an attribute access function is called.

\n", + "group": "attr" + }, + "git_attr_add_macro": { + "type": "function", + "file": "attr.h", + "line": 237, + "lineto": 240, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": null + }, + { + "name": "name", + "type": "const char *", + "comment": null + }, + { + "name": "values", + "type": "const char *", + "comment": null + } + ], + "argline": "git_repository *repo, const char *name, const char *values", + "sig": "git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Add a macro definition.

\n", + "comments": "

Macros will automatically be loaded from the top level .gitattributes file of the repository (plus the built-in "binary" macro). This function allows you to add others. For example, to add the default macro, you would call:

\n\n
 git_attr_add_macro(repo, "binary", "-diff -crlf");\n
\n", + "group": "attr" + }, + "git_blame_init_options": { + "type": "function", + "file": "blame.h", + "line": 92, + "lineto": 94, + "args": [ + { + "name": "opts", + "type": "git_blame_options *", + "comment": "The `git_blame_options` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_BLAME_OPTIONS_VERSION`" + } + ], + "argline": "git_blame_options *opts, unsigned int version", + "sig": "git_blame_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_blame_options with default values. Equivalent to\n creating an instance with GIT_BLAME_OPTIONS_INIT.

\n", + "comments": "", + "group": "blame" + }, + "git_blame_get_hunk_count": { + "type": "function", + "file": "blame.h", + "line": 137, + "lineto": 137, + "args": [ + { + "name": "blame", + "type": "git_blame *", + "comment": null + } + ], + "argline": "git_blame *blame", + "sig": "git_blame *", + "return": { + "type": "uint32_t", + "comment": null + }, + "description": "

Gets the number of hunks that exist in the blame structure.

\n", + "comments": "", + "group": "blame" + }, + "git_blame_get_hunk_byindex": { + "type": "function", + "file": "blame.h", + "line": 146, + "lineto": 148, + "args": [ + { + "name": "blame", + "type": "git_blame *", + "comment": "the blame structure to query" + }, + { + "name": "index", + "type": "uint32_t", + "comment": "index of the hunk to retrieve" + } + ], + "argline": "git_blame *blame, uint32_t index", + "sig": "git_blame *::uint32_t", + "return": { + "type": "const git_blame_hunk *", + "comment": " the hunk at the given index, or NULL on error" + }, + "description": "

Gets the blame hunk at the given index.

\n", + "comments": "", + "group": "blame" + }, + "git_blame_get_hunk_byline": { + "type": "function", + "file": "blame.h", + "line": 157, + "lineto": 159, + "args": [ + { + "name": "blame", + "type": "git_blame *", + "comment": "the blame structure to query" + }, + { + "name": "lineno", + "type": "size_t", + "comment": "the (1-based) line number to find a hunk for" + } + ], + "argline": "git_blame *blame, size_t lineno", + "sig": "git_blame *::size_t", + "return": { + "type": "const git_blame_hunk *", + "comment": " the hunk that contains the given line, or NULL on error" + }, + "description": "

Gets the hunk that relates to the given line number in the newest commit.

\n", + "comments": "", + "group": "blame", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_blame_get_hunk_byline-1" + ] + } + }, + "git_blame_file": { + "type": "function", + "file": "blame.h", + "line": 172, + "lineto": 176, + "args": [ + { + "name": "out", + "type": "git_blame **", + "comment": "pointer that will receive the blame object" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository whose history is to be walked" + }, + { + "name": "path", + "type": "const char *", + "comment": "path to file to consider" + }, + { + "name": "options", + "type": "git_blame_options *", + "comment": "options for the blame operation. If NULL, this is treated as\n though GIT_BLAME_OPTIONS_INIT were passed." + } + ], + "argline": "git_blame **out, git_repository *repo, const char *path, git_blame_options *options", + "sig": "git_blame **::git_repository *::const char *::git_blame_options *", + "return": { + "type": "int", + "comment": " 0 on success, or an error code. (use giterr_last for information\n about the error.)" + }, + "description": "

Get the blame for a single file.

\n", + "comments": "", + "group": "blame", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_blame_file-2" + ] + } + }, + "git_blame_buffer": { + "type": "function", + "file": "blame.h", + "line": 196, + "lineto": 200, + "args": [ + { + "name": "out", + "type": "git_blame **", + "comment": "pointer that will receive the resulting blame data" + }, + { + "name": "reference", + "type": "git_blame *", + "comment": "cached blame from the history of the file (usually the output\n from git_blame_file)" + }, + { + "name": "buffer", + "type": "const char *", + "comment": "the (possibly) modified contents of the file" + }, + { + "name": "buffer_len", + "type": "size_t", + "comment": "number of valid bytes in the buffer" + } + ], + "argline": "git_blame **out, git_blame *reference, const char *buffer, size_t buffer_len", + "sig": "git_blame **::git_blame *::const char *::size_t", + "return": { + "type": "int", + "comment": " 0 on success, or an error code. (use giterr_last for information\n about the error)" + }, + "description": "

Get blame data for a file that has been modified in memory. The reference\n parameter is a pre-calculated blame for the in-odb history of the file. This\n means that once a file blame is completed (which can be expensive), updating\n the buffer blame is very fast.

\n", + "comments": "

Lines that differ between the buffer and the committed version are marked as having a zero OID for their final_commit_id.

\n", + "group": "blame" + }, + "git_blame_free": { + "type": "function", + "file": "blame.h", + "line": 207, + "lineto": 207, + "args": [ + { + "name": "blame", + "type": "git_blame *", + "comment": "the blame structure to free" + } + ], + "argline": "git_blame *blame", + "sig": "git_blame *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free memory allocated by git_blame_file or git_blame_buffer.

\n", + "comments": "", + "group": "blame", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_blame_free-3" + ] + } + }, + "git_blob_lookup": { + "type": "function", + "file": "blob.h", + "line": 33, + "lineto": 33, + "args": [ + { + "name": "blob", + "type": "git_blob **", + "comment": "pointer to the looked up blob" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repo to use when locating the blob." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "identity of the blob to locate." + } + ], + "argline": "git_blob **blob, git_repository *repo, const git_oid *id", + "sig": "git_blob **::git_repository *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a blob object from a repository.

\n", + "comments": "", + "group": "blob", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_blob_lookup-4" + ], + "general.c": [ + "ex/HEAD/general.html#git_blob_lookup-1" + ] + } + }, + "git_blob_lookup_prefix": { + "type": "function", + "file": "blob.h", + "line": 47, + "lineto": 47, + "args": [ + { + "name": "blob", + "type": "git_blob **", + "comment": "pointer to the looked up blob" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repo to use when locating the blob." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "identity of the blob to locate." + }, + { + "name": "len", + "type": "size_t", + "comment": "the length of the short identifier" + } + ], + "argline": "git_blob **blob, git_repository *repo, const git_oid *id, size_t len", + "sig": "git_blob **::git_repository *::const git_oid *::size_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a blob object from a repository,\n given a prefix of its identifier (short id).

\n", + "comments": "", + "group": "blob" + }, + "git_blob_free": { + "type": "function", + "file": "blob.h", + "line": 60, + "lineto": 60, + "args": [ + { + "name": "blob", + "type": "git_blob *", + "comment": "the blob to close" + } + ], + "argline": "git_blob *blob", + "sig": "git_blob *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Close an open blob

\n", + "comments": "

This is a wrapper around git_object_free()

\n\n

IMPORTANT: It is necessary to call this method when you stop using a blob. Failure to do so will cause a memory leak.

\n", + "group": "blob", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_blob_free-5" + ] + } + }, + "git_blob_id": { + "type": "function", + "file": "blob.h", + "line": 68, + "lineto": 68, + "args": [ + { + "name": "blob", + "type": "const git_blob *", + "comment": "a previously loaded blob." + } + ], + "argline": "const git_blob *blob", + "sig": "const git_blob *", + "return": { + "type": "const git_oid *", + "comment": " SHA1 hash for this blob." + }, + "description": "

Get the id of a blob.

\n", + "comments": "", + "group": "blob" + }, + "git_blob_owner": { + "type": "function", + "file": "blob.h", + "line": 76, + "lineto": 76, + "args": [ + { + "name": "blob", + "type": "const git_blob *", + "comment": "A previously loaded blob." + } + ], + "argline": "const git_blob *blob", + "sig": "const git_blob *", + "return": { + "type": "git_repository *", + "comment": " Repository that contains this blob." + }, + "description": "

Get the repository that contains the blob.

\n", + "comments": "", + "group": "blob" + }, + "git_blob_rawcontent": { + "type": "function", + "file": "blob.h", + "line": 89, + "lineto": 89, + "args": [ + { + "name": "blob", + "type": "const git_blob *", + "comment": "pointer to the blob" + } + ], + "argline": "const git_blob *blob", + "sig": "const git_blob *", + "return": { + "type": "const void *", + "comment": " the pointer" + }, + "description": "

Get a read-only buffer with the raw content of a blob.

\n", + "comments": "

A pointer to the raw content of a blob is returned; this pointer is owned internally by the object and shall not be free'd. The pointer may be invalidated at a later time.

\n", + "group": "blob", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_blob_rawcontent-6" + ], + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_blob_rawcontent-1" + ], + "general.c": [ + "ex/HEAD/general.html#git_blob_rawcontent-2" + ] + } + }, + "git_blob_rawsize": { + "type": "function", + "file": "blob.h", + "line": 97, + "lineto": 97, + "args": [ + { + "name": "blob", + "type": "const git_blob *", + "comment": "pointer to the blob" + } + ], + "argline": "const git_blob *blob", + "sig": "const git_blob *", + "return": { + "type": "git_off_t", + "comment": " size on bytes" + }, + "description": "

Get the size in bytes of the contents of a blob

\n", + "comments": "", + "group": "blob", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_blob_rawsize-7" + ], + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_blob_rawsize-2" + ], + "general.c": [ + "ex/HEAD/general.html#git_blob_rawsize-3", + "ex/HEAD/general.html#git_blob_rawsize-4" + ] + } + }, + "git_blob_filtered_content": { + "type": "function", + "file": "blob.h", + "line": 122, + "lineto": 126, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "The git_buf to be filled in" + }, + { + "name": "blob", + "type": "git_blob *", + "comment": "Pointer to the blob" + }, + { + "name": "as_path", + "type": "const char *", + "comment": "Path used for file attribute lookups, etc." + }, + { + "name": "check_for_binary_data", + "type": "int", + "comment": "Should this test if blob content contains\n NUL bytes / looks like binary data before applying filters?" + } + ], + "argline": "git_buf *out, git_blob *blob, const char *as_path, int check_for_binary_data", + "sig": "git_buf *::git_blob *::const char *::int", + "return": { + "type": "int", + "comment": " 0 on success or an error code" + }, + "description": "

Get a buffer with the filtered content of a blob.

\n", + "comments": "

This applies filters as if the blob was being checked out to the working directory under the specified filename. This may apply CRLF filtering or other types of changes depending on the file attributes set for the blob and the content detected in it.

\n\n

The output is written into a git_buf which the caller must free when done (via git_buf_free).

\n\n

If no filters need to be applied, then the out buffer will just be populated with a pointer to the raw content of the blob. In that case, be careful to not free the blob until done with the buffer or copy it into memory you own.

\n", + "group": "blob" + }, + "git_blob_create_fromworkdir": { + "type": "function", + "file": "blob.h", + "line": 139, + "lineto": 139, + "args": [ + { + "name": "id", + "type": "git_oid *", + "comment": "return the id of the written blob" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository where the blob will be written.\n\tthis repository cannot be bare" + }, + { + "name": "relative_path", + "type": "const char *", + "comment": "file from which the blob will be created,\n\trelative to the repository's working dir" + } + ], + "argline": "git_oid *id, git_repository *repo, const char *relative_path", + "sig": "git_oid *::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Read a file from the working folder of a repository\n and write it to the Object Database as a loose blob

\n", + "comments": "", + "group": "blob" + }, + "git_blob_create_fromdisk": { + "type": "function", + "file": "blob.h", + "line": 151, + "lineto": 151, + "args": [ + { + "name": "id", + "type": "git_oid *", + "comment": "return the id of the written blob" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository where the blob will be written.\n\tthis repository can be bare or not" + }, + { + "name": "path", + "type": "const char *", + "comment": "file from which the blob will be created" + } + ], + "argline": "git_oid *id, git_repository *repo, const char *path", + "sig": "git_oid *::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Read a file from the filesystem and write its content\n to the Object Database as a loose blob

\n", + "comments": "", + "group": "blob" + }, + "git_blob_create_fromchunks": { + "type": "function", + "file": "blob.h", + "line": 187, + "lineto": 192, + "args": [ + { + "name": "id", + "type": "git_oid *", + "comment": "Return the id of the written blob" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where the blob will be written.\n This repository can be bare or not." + }, + { + "name": "hintpath", + "type": "const char *", + "comment": "If not NULL, will be used to select data filters\n to apply onto the content of the blob to be created." + }, + { + "name": "callback", + "type": "git_blob_chunk_cb", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "git_oid *id, git_repository *repo, const char *hintpath, git_blob_chunk_cb callback, void *payload", + "sig": "git_oid *::git_repository *::const char *::git_blob_chunk_cb::void *", + "return": { + "type": "int", + "comment": " 0 or error code (from either libgit2 or callback function)" + }, + "description": "

Write a loose blob to the Object Database from a\n provider of chunks of data.

\n", + "comments": "

If the hintpath parameter is filled, it will be used to determine what git filters should be applied to the object before it is written to the object database.

\n\n

The implementation of the callback MUST respect the following rules:

\n\n
    \n
  • content must be filled by the callback. The maximum number of bytes that the buffer can accept per call is defined by the max_length parameter. Allocation and freeing of the buffer will be taken care of by libgit2.

  • \n
  • The callback must return the number of bytes that have been written to the content buffer.

  • \n
  • When there is no more data to stream, callback should return 0. This will prevent it from being invoked anymore.

  • \n
  • If an error occurs, the callback should return a negative value. This value will be returned to the caller.

  • \n
\n", + "group": "blob" + }, + "git_blob_create_fromstream": { + "type": "function", + "file": "blob.h", + "line": 219, + "lineto": 222, + "args": [ + { + "name": "out", + "type": "git_writestream **", + "comment": "the stream into which to write" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where the blob will be written.\n This repository can be bare or not." + }, + { + "name": "hintpath", + "type": "const char *", + "comment": "If not NULL, will be used to select data filters\n to apply onto the content of the blob to be created." + } + ], + "argline": "git_writestream **out, git_repository *repo, const char *hintpath", + "sig": "git_writestream **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or error code" + }, + "description": "

Create a stream to write a new blob into the object db

\n", + "comments": "

This function may need to buffer the data on disk and will in general not be the right choice if you know the size of the data to write. If you have data in memory, use git_blob_create_frombuffer(). If you do not, but know the size of the contents (and don't want/need to perform filtering), use git_odb_open_wstream().

\n\n

Don't close this stream yourself but pass it to git_blob_create_fromstream_commit() to commit the write to the object db and get the object id.

\n\n

If the hintpath parameter is filled, it will be used to determine what git filters should be applied to the object before it is written to the object database.

\n", + "group": "blob" + }, + "git_blob_create_fromstream_commit": { + "type": "function", + "file": "blob.h", + "line": 233, + "lineto": 235, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "the id of the new blob" + }, + { + "name": "stream", + "type": "git_writestream *", + "comment": "the stream to close" + } + ], + "argline": "git_oid *out, git_writestream *stream", + "sig": "git_oid *::git_writestream *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Close the stream and write the blob to the object db

\n", + "comments": "

The stream will be closed and freed.

\n", + "group": "blob" + }, + "git_blob_create_frombuffer": { + "type": "function", + "file": "blob.h", + "line": 246, + "lineto": 247, + "args": [ + { + "name": "id", + "type": "git_oid *", + "comment": "return the id of the written blob" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository where to blob will be written" + }, + { + "name": "buffer", + "type": "const void *", + "comment": "data to be written into the blob" + }, + { + "name": "len", + "type": "size_t", + "comment": "length of the data" + } + ], + "argline": "git_oid *id, git_repository *repo, const void *buffer, size_t len", + "sig": "git_oid *::git_repository *::const void *::size_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Write an in-memory buffer to the ODB as a blob

\n", + "comments": "", + "group": "blob" + }, + "git_blob_is_binary": { + "type": "function", + "file": "blob.h", + "line": 260, + "lineto": 260, + "args": [ + { + "name": "blob", + "type": "const git_blob *", + "comment": "The blob which content should be analyzed" + } + ], + "argline": "const git_blob *blob", + "sig": "const git_blob *", + "return": { + "type": "int", + "comment": " 1 if the content of the blob is detected\n as binary; 0 otherwise." + }, + "description": "

Determine if the blob content is most certainly binary or not.

\n", + "comments": "

The heuristic used to guess if a file is binary is taken from core git: Searching for NUL bytes and looking for a reasonable ratio of printable to non-printable characters among the first 8000 bytes.

\n", + "group": "blob" + }, + "git_blob_dup": { + "type": "function", + "file": "blob.h", + "line": 269, + "lineto": 269, + "args": [ + { + "name": "out", + "type": "git_blob **", + "comment": "Pointer to store the copy of the object" + }, + { + "name": "source", + "type": "git_blob *", + "comment": "Original object to copy" + } + ], + "argline": "git_blob **out, git_blob *source", + "sig": "git_blob **::git_blob *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create an in-memory copy of a blob. The copy must be explicitly\n free'd or it will leak.

\n", + "comments": "", + "group": "blob" + }, + "git_branch_create": { + "type": "function", + "file": "branch.h", + "line": 50, + "lineto": 55, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "Pointer where to store the underlying reference." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": null + }, + { + "name": "branch_name", + "type": "const char *", + "comment": "Name for the branch; this name is\n validated for consistency. It should also not conflict with\n an already existing branch name." + }, + { + "name": "target", + "type": "const git_commit *", + "comment": "Commit to which this branch should point. This object\n must belong to the given `repo`." + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite existing branch." + } + ], + "argline": "git_reference **out, git_repository *repo, const char *branch_name, const git_commit *target, int force", + "sig": "git_reference **::git_repository *::const char *::const git_commit *::int", + "return": { + "type": "int", + "comment": " 0, GIT_EINVALIDSPEC or an error code.\n A proper reference is written in the refs/heads namespace\n pointing to the provided target commit." + }, + "description": "

Create a new branch pointing at a target commit

\n", + "comments": "

A new direct reference will be created pointing to this target commit. If force is true and a reference already exists with the given name, it'll be replaced.

\n\n

The returned reference must be freed by the user.

\n\n

The branch name will be checked for validity. See git_tag_create() for rules about valid names.

\n", + "group": "branch" + }, + "git_branch_create_from_annotated": { + "type": "function", + "file": "branch.h", + "line": 68, + "lineto": 73, + "args": [ + { + "name": "ref_out", + "type": "git_reference **", + "comment": null + }, + { + "name": "repository", + "type": "git_repository *", + "comment": null + }, + { + "name": "branch_name", + "type": "const char *", + "comment": null + }, + { + "name": "commit", + "type": "const git_annotated_commit *", + "comment": null + }, + { + "name": "force", + "type": "int", + "comment": null + } + ], + "argline": "git_reference **ref_out, git_repository *repository, const char *branch_name, const git_annotated_commit *commit, int force", + "sig": "git_reference **::git_repository *::const char *::const git_annotated_commit *::int", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a new branch pointing at a target commit

\n", + "comments": "

This behaves like git_branch_create() but takes an annotated commit, which lets you specify which extended sha syntax string was specified by a user, allowing for more exact reflog messages.

\n\n

See the documentation for git_branch_create().

\n", + "group": "branch" + }, + "git_branch_delete": { + "type": "function", + "file": "branch.h", + "line": 85, + "lineto": 85, + "args": [ + { + "name": "branch", + "type": "git_reference *", + "comment": "A valid reference representing a branch" + } + ], + "argline": "git_reference *branch", + "sig": "git_reference *", + "return": { + "type": "int", + "comment": " 0 on success, or an error code." + }, + "description": "

Delete an existing branch reference.

\n", + "comments": "

If the branch is successfully deleted, the passed reference object will be invalidated. The reference must be freed manually by the user.

\n", + "group": "branch" + }, + "git_branch_iterator_new": { + "type": "function", + "file": "branch.h", + "line": 101, + "lineto": 104, + "args": [ + { + "name": "out", + "type": "git_branch_iterator **", + "comment": "the iterator" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to find the branches." + }, + { + "name": "list_flags", + "type": "git_branch_t", + "comment": "Filtering flags for the branch\n listing. Valid values are GIT_BRANCH_LOCAL, GIT_BRANCH_REMOTE\n or GIT_BRANCH_ALL." + } + ], + "argline": "git_branch_iterator **out, git_repository *repo, git_branch_t list_flags", + "sig": "git_branch_iterator **::git_repository *::git_branch_t", + "return": { + "type": "int", + "comment": " 0 on success or an error code" + }, + "description": "

Create an iterator which loops over the requested branches.

\n", + "comments": "", + "group": "branch" + }, + "git_branch_next": { + "type": "function", + "file": "branch.h", + "line": 114, + "lineto": 114, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "the reference" + }, + { + "name": "out_type", + "type": "git_branch_t *", + "comment": "the type of branch (local or remote-tracking)" + }, + { + "name": "iter", + "type": "git_branch_iterator *", + "comment": "the branch iterator" + } + ], + "argline": "git_reference **out, git_branch_t *out_type, git_branch_iterator *iter", + "sig": "git_reference **::git_branch_t *::git_branch_iterator *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ITEROVER if there are no more branches or an error code." + }, + "description": "

Retrieve the next branch from the iterator

\n", + "comments": "", + "group": "branch" + }, + "git_branch_iterator_free": { + "type": "function", + "file": "branch.h", + "line": 121, + "lineto": 121, + "args": [ + { + "name": "iter", + "type": "git_branch_iterator *", + "comment": "the iterator to free" + } + ], + "argline": "git_branch_iterator *iter", + "sig": "git_branch_iterator *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a branch iterator

\n", + "comments": "", + "group": "branch" + }, + "git_branch_move": { + "type": "function", + "file": "branch.h", + "line": 138, + "lineto": 142, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": null + }, + { + "name": "branch", + "type": "git_reference *", + "comment": "Current underlying reference of the branch." + }, + { + "name": "new_branch_name", + "type": "const char *", + "comment": "Target name of the branch once the move\n is performed; this name is validated for consistency." + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite existing branch." + } + ], + "argline": "git_reference **out, git_reference *branch, const char *new_branch_name, int force", + "sig": "git_reference **::git_reference *::const char *::int", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EINVALIDSPEC or an error code." + }, + "description": "

Move/rename an existing local branch reference.

\n", + "comments": "

The new branch name will be checked for validity. See git_tag_create() for rules about valid names.

\n", + "group": "branch" + }, + "git_branch_lookup": { + "type": "function", + "file": "branch.h", + "line": 165, + "lineto": 169, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "pointer to the looked-up branch reference" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to look up the branch" + }, + { + "name": "branch_name", + "type": "const char *", + "comment": "Name of the branch to be looked-up;\n this name is validated for consistency." + }, + { + "name": "branch_type", + "type": "git_branch_t", + "comment": "Type of the considered branch. This should\n be valued with either GIT_BRANCH_LOCAL or GIT_BRANCH_REMOTE." + } + ], + "argline": "git_reference **out, git_repository *repo, const char *branch_name, git_branch_t branch_type", + "sig": "git_reference **::git_repository *::const char *::git_branch_t", + "return": { + "type": "int", + "comment": " 0 on success; GIT_ENOTFOUND when no matching branch\n exists, GIT_EINVALIDSPEC, otherwise an error code." + }, + "description": "

Lookup a branch by its name in a repository.

\n", + "comments": "

The generated reference must be freed by the user.

\n\n

The branch name will be checked for validity. See git_tag_create() for rules about valid names.

\n", + "group": "branch" + }, + "git_branch_name": { + "type": "function", + "file": "branch.h", + "line": 186, + "lineto": 188, + "args": [ + { + "name": "out", + "type": "const char **", + "comment": "where the pointer of branch name is stored;\n this is valid as long as the ref is not freed." + }, + { + "name": "ref", + "type": "const git_reference *", + "comment": "the reference ideally pointing to a branch" + } + ], + "argline": "const char **out, const git_reference *ref", + "sig": "const char **::const git_reference *", + "return": { + "type": "int", + "comment": " 0 on success; otherwise an error code (e.g., if the\n ref is no local or remote branch)." + }, + "description": "

Return the name of the given local or remote branch.

\n", + "comments": "

The name of the branch matches the definition of the name for git_branch_lookup. That is, if the returned name is given to git_branch_lookup() then the reference is returned that was given to this function.

\n", + "group": "branch" + }, + "git_branch_upstream": { + "type": "function", + "file": "branch.h", + "line": 202, + "lineto": 204, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "Pointer where to store the retrieved\n reference." + }, + { + "name": "branch", + "type": "const git_reference *", + "comment": "Current underlying reference of the branch." + } + ], + "argline": "git_reference **out, const git_reference *branch", + "sig": "git_reference **::const git_reference *", + "return": { + "type": "int", + "comment": " 0 on success; GIT_ENOTFOUND when no remote tracking\n reference exists, otherwise an error code." + }, + "description": "

Return the reference supporting the remote tracking branch,\n given a local branch reference.

\n", + "comments": "", + "group": "branch" + }, + "git_branch_set_upstream": { + "type": "function", + "file": "branch.h", + "line": 216, + "lineto": 216, + "args": [ + { + "name": "branch", + "type": "git_reference *", + "comment": "the branch to configure" + }, + { + "name": "upstream_name", + "type": "const char *", + "comment": "remote-tracking or local branch to set as\n upstream. Pass NULL to unset." + } + ], + "argline": "git_reference *branch, const char *upstream_name", + "sig": "git_reference *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Set the upstream configuration for a given local branch

\n", + "comments": "", + "group": "branch" + }, + "git_branch_is_head": { + "type": "function", + "file": "branch.h", + "line": 245, + "lineto": 246, + "args": [ + { + "name": "branch", + "type": "const git_reference *", + "comment": "Current underlying reference of the branch." + } + ], + "argline": "const git_reference *branch", + "sig": "const git_reference *", + "return": { + "type": "int", + "comment": " 1 if HEAD points at the branch, 0 if it isn't,\n error code otherwise." + }, + "description": "

Determine if the current local branch is pointed at by HEAD.

\n", + "comments": "", + "group": "branch" + }, + "git_buf_free": { + "type": "function", + "file": "buffer.h", + "line": 72, + "lineto": 72, + "args": [ + { + "name": "buffer", + "type": "git_buf *", + "comment": "The buffer to deallocate" + } + ], + "argline": "git_buf *buffer", + "sig": "git_buf *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free the memory referred to by the git_buf.

\n", + "comments": "

Note that this does not free the git_buf itself, just the memory pointed to by buffer->ptr. This will not free the memory if it looks like it was not allocated internally, but it will clear the buffer back to the empty state.

\n", + "group": "buf", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_buf_free-1" + ], + "remote.c": [ + "ex/HEAD/remote.html#git_buf_free-1" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_buf_free-1" + ] + } + }, + "git_buf_grow": { + "type": "function", + "file": "buffer.h", + "line": 95, + "lineto": 95, + "args": [ + { + "name": "buffer", + "type": "git_buf *", + "comment": "The buffer to be resized; may or may not be allocated yet" + }, + { + "name": "target_size", + "type": "size_t", + "comment": "The desired available size" + } + ], + "argline": "git_buf *buffer, size_t target_size", + "sig": "git_buf *::size_t", + "return": { + "type": "int", + "comment": " 0 on success, -1 on allocation failure" + }, + "description": "

Resize the buffer allocation to make more space.

\n", + "comments": "

This will attempt to grow the buffer to accommodate the target size.

\n\n

If the buffer refers to memory that was not allocated by libgit2 (i.e. the asize field is zero), then ptr will be replaced with a newly allocated block of data. Be careful so that memory allocated by the caller is not lost. As a special variant, if you pass target_size as 0 and the memory is not allocated by libgit2, this will allocate a new buffer of size size and copy the external data into it.

\n\n

Currently, this will never shrink a buffer, only expand it.

\n\n

If the allocation fails, this will return an error and the buffer will be marked as invalid for future operations, invaliding the contents.

\n", + "group": "buf" + }, + "git_buf_set": { + "type": "function", + "file": "buffer.h", + "line": 105, + "lineto": 106, + "args": [ + { + "name": "buffer", + "type": "git_buf *", + "comment": "The buffer to set" + }, + { + "name": "data", + "type": "const void *", + "comment": "The data to copy into the buffer" + }, + { + "name": "datalen", + "type": "size_t", + "comment": "The length of the data to copy into the buffer" + } + ], + "argline": "git_buf *buffer, const void *data, size_t datalen", + "sig": "git_buf *::const void *::size_t", + "return": { + "type": "int", + "comment": " 0 on success, -1 on allocation failure" + }, + "description": "

Set buffer to a copy of some raw data.

\n", + "comments": "", + "group": "buf" + }, + "git_buf_is_binary": { + "type": "function", + "file": "buffer.h", + "line": 114, + "lineto": 114, + "args": [ + { + "name": "buf", + "type": "const git_buf *", + "comment": "Buffer to check" + } + ], + "argline": "const git_buf *buf", + "sig": "const git_buf *", + "return": { + "type": "int", + "comment": " 1 if buffer looks like non-text data" + }, + "description": "

Check quickly if buffer looks like it contains binary data

\n", + "comments": "", + "group": "buf" + }, + "git_buf_contains_nul": { + "type": "function", + "file": "buffer.h", + "line": 122, + "lineto": 122, + "args": [ + { + "name": "buf", + "type": "const git_buf *", + "comment": "Buffer to check" + } + ], + "argline": "const git_buf *buf", + "sig": "const git_buf *", + "return": { + "type": "int", + "comment": " 1 if buffer contains a NUL byte" + }, + "description": "

Check quickly if buffer contains a NUL byte

\n", + "comments": "", + "group": "buf" + }, + "git_checkout_init_options": { + "type": "function", + "file": "checkout.h", + "line": 308, + "lineto": 310, + "args": [ + { + "name": "opts", + "type": "git_checkout_options *", + "comment": "the `git_checkout_options` struct to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_CHECKOUT_OPTIONS_VERSION`" + } + ], + "argline": "git_checkout_options *opts, unsigned int version", + "sig": "git_checkout_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_checkout_options with default values. Equivalent to\n creating an instance with GIT_CHECKOUT_OPTIONS_INIT.

\n", + "comments": "", + "group": "checkout" + }, + "git_checkout_head": { + "type": "function", + "file": "checkout.h", + "line": 322, + "lineto": 324, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "repository to check out (must be non-bare)" + }, + { + "name": "opts", + "type": "const git_checkout_options *", + "comment": "specifies checkout options (may be NULL)" + } + ], + "argline": "git_repository *repo, const git_checkout_options *opts", + "sig": "git_repository *::const git_checkout_options *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EUNBORNBRANCH if HEAD points to a non\n existing branch, non-zero value returned by `notify_cb`, or\n other error code \n<\n 0 (use giterr_last for error details)" + }, + "description": "

Updates files in the index and the working tree to match the content of\n the commit pointed at by HEAD.

\n", + "comments": "", + "group": "checkout" + }, + "git_checkout_index": { + "type": "function", + "file": "checkout.h", + "line": 335, + "lineto": 338, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "repository into which to check out (must be non-bare)" + }, + { + "name": "index", + "type": "git_index *", + "comment": "index to be checked out (or NULL to use repository index)" + }, + { + "name": "opts", + "type": "const git_checkout_options *", + "comment": "specifies checkout options (may be NULL)" + } + ], + "argline": "git_repository *repo, git_index *index, const git_checkout_options *opts", + "sig": "git_repository *::git_index *::const git_checkout_options *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero return value from `notify_cb`, or error\n code \n<\n 0 (use giterr_last for error details)" + }, + "description": "

Updates files in the working tree to match the content of the index.

\n", + "comments": "", + "group": "checkout" + }, + "git_checkout_tree": { + "type": "function", + "file": "checkout.h", + "line": 351, + "lineto": 354, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "repository to check out (must be non-bare)" + }, + { + "name": "treeish", + "type": "const git_object *", + "comment": "a commit, tag or tree which content will be used to update\n the working directory (or NULL to use HEAD)" + }, + { + "name": "opts", + "type": "const git_checkout_options *", + "comment": "specifies checkout options (may be NULL)" + } + ], + "argline": "git_repository *repo, const git_object *treeish, const git_checkout_options *opts", + "sig": "git_repository *::const git_object *::const git_checkout_options *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero return value from `notify_cb`, or error\n code \n<\n 0 (use giterr_last for error details)" + }, + "description": "

Updates files in the index and working tree to match the content of the\n tree pointed at by the treeish.

\n", + "comments": "", + "group": "checkout" + }, + "git_cherrypick_init_options": { + "type": "function", + "file": "cherrypick.h", + "line": 47, + "lineto": 49, + "args": [ + { + "name": "opts", + "type": "git_cherrypick_options *", + "comment": "the `git_cherrypick_options` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_CHERRYPICK_OPTIONS_VERSION`" + } + ], + "argline": "git_cherrypick_options *opts, unsigned int version", + "sig": "git_cherrypick_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_cherrypick_options with default values. Equivalent to\n creating an instance with GIT_CHERRYPICK_OPTIONS_INIT.

\n", + "comments": "", + "group": "cherrypick" + }, + "git_cherrypick_commit": { + "type": "function", + "file": "cherrypick.h", + "line": 65, + "lineto": 71, + "args": [ + { + "name": "out", + "type": "git_index **", + "comment": "pointer to store the index result in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository that contains the given commits" + }, + { + "name": "cherrypick_commit", + "type": "git_commit *", + "comment": "the commit to cherry-pick" + }, + { + "name": "our_commit", + "type": "git_commit *", + "comment": "the commit to revert against (eg, HEAD)" + }, + { + "name": "mainline", + "type": "unsigned int", + "comment": "the parent of the revert commit, if it is a merge" + }, + { + "name": "merge_options", + "type": "const git_merge_options *", + "comment": "the merge options (or null for defaults)" + } + ], + "argline": "git_index **out, git_repository *repo, git_commit *cherrypick_commit, git_commit *our_commit, unsigned int mainline, const git_merge_options *merge_options", + "sig": "git_index **::git_repository *::git_commit *::git_commit *::unsigned int::const git_merge_options *", + "return": { + "type": "int", + "comment": " zero on success, -1 on failure." + }, + "description": "

Cherry-picks the given commit against the given "our" commit, producing an\n index that reflects the result of the cherry-pick.

\n", + "comments": "

The returned index must be freed explicitly with git_index_free.

\n", + "group": "cherrypick" + }, + "git_cherrypick": { + "type": "function", + "file": "cherrypick.h", + "line": 81, + "lineto": 84, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to cherry-pick" + }, + { + "name": "commit", + "type": "git_commit *", + "comment": "the commit to cherry-pick" + }, + { + "name": "cherrypick_options", + "type": "const git_cherrypick_options *", + "comment": "the cherry-pick options (or null for defaults)" + } + ], + "argline": "git_repository *repo, git_commit *commit, const git_cherrypick_options *cherrypick_options", + "sig": "git_repository *::git_commit *::const git_cherrypick_options *", + "return": { + "type": "int", + "comment": " zero on success, -1 on failure." + }, + "description": "

Cherry-pick the given commit, producing changes in the index and working directory.

\n", + "comments": "", + "group": "cherrypick" + }, + "git_clone_init_options": { + "type": "function", + "file": "clone.h", + "line": 179, + "lineto": 181, + "args": [ + { + "name": "opts", + "type": "git_clone_options *", + "comment": "The `git_clone_options` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_CLONE_OPTIONS_VERSION`" + } + ], + "argline": "git_clone_options *opts, unsigned int version", + "sig": "git_clone_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_clone_options with default values. Equivalent to\n creating an instance with GIT_CLONE_OPTIONS_INIT.

\n", + "comments": "", + "group": "clone" + }, + "git_clone": { + "type": "function", + "file": "clone.h", + "line": 199, + "lineto": 203, + "args": [ + { + "name": "out", + "type": "git_repository **", + "comment": "pointer that will receive the resulting repository object" + }, + { + "name": "url", + "type": "const char *", + "comment": "the remote repository to clone" + }, + { + "name": "local_path", + "type": "const char *", + "comment": "local directory to clone to" + }, + { + "name": "options", + "type": "const git_clone_options *", + "comment": "configuration options for the clone. If NULL, the\n function works as though GIT_OPTIONS_INIT were passed." + } + ], + "argline": "git_repository **out, const char *url, const char *local_path, const git_clone_options *options", + "sig": "git_repository **::const char *::const char *::const git_clone_options *", + "return": { + "type": "int", + "comment": " 0 on success, any non-zero return value from a callback\n function, or a negative value to indicate an error (use\n `giterr_last` for a detailed error message)" + }, + "description": "

Clone a remote repository.

\n", + "comments": "

By default this creates its repository and initial remote to match git's defaults. You can use the options in the callback to customize how these are created.

\n", + "group": "clone", + "examples": { + "network/clone.c": [ + "ex/HEAD/network/clone.html#git_clone-1" + ] + } + }, + "git_commit_lookup": { + "type": "function", + "file": "commit.h", + "line": 36, + "lineto": 37, + "args": [ + { + "name": "commit", + "type": "git_commit **", + "comment": "pointer to the looked up commit" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repo to use when locating the commit." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "identity of the commit to locate. If the object is\n\t\tan annotated tag it will be peeled back to the commit." + } + ], + "argline": "git_commit **commit, git_repository *repo, const git_oid *id", + "sig": "git_commit **::git_repository *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a commit object from a repository.

\n", + "comments": "

The returned object should be released with git_commit_free when no longer needed.

\n", + "group": "commit", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_commit_lookup-5", + "ex/HEAD/general.html#git_commit_lookup-6", + "ex/HEAD/general.html#git_commit_lookup-7" + ], + "log.c": [ + "ex/HEAD/log.html#git_commit_lookup-1" + ] + } + }, + "git_commit_lookup_prefix": { + "type": "function", + "file": "commit.h", + "line": 55, + "lineto": 56, + "args": [ + { + "name": "commit", + "type": "git_commit **", + "comment": "pointer to the looked up commit" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repo to use when locating the commit." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "identity of the commit to locate. If the object is\n\t\tan annotated tag it will be peeled back to the commit." + }, + { + "name": "len", + "type": "size_t", + "comment": "the length of the short identifier" + } + ], + "argline": "git_commit **commit, git_repository *repo, const git_oid *id, size_t len", + "sig": "git_commit **::git_repository *::const git_oid *::size_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a commit object from a repository, given a prefix of its\n identifier (short id).

\n", + "comments": "

The returned object should be released with git_commit_free when no longer needed.

\n", + "group": "commit" + }, + "git_commit_free": { + "type": "function", + "file": "commit.h", + "line": 70, + "lineto": 70, + "args": [ + { + "name": "commit", + "type": "git_commit *", + "comment": "the commit to close" + } + ], + "argline": "git_commit *commit", + "sig": "git_commit *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Close an open commit

\n", + "comments": "

This is a wrapper around git_object_free()

\n\n

IMPORTANT: It is necessary to call this method when you stop using a commit. Failure to do so will cause a memory leak.

\n", + "group": "commit", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_commit_free-8", + "ex/HEAD/general.html#git_commit_free-9", + "ex/HEAD/general.html#git_commit_free-10", + "ex/HEAD/general.html#git_commit_free-11" + ], + "log.c": [ + "ex/HEAD/log.html#git_commit_free-2", + "ex/HEAD/log.html#git_commit_free-3", + "ex/HEAD/log.html#git_commit_free-4", + "ex/HEAD/log.html#git_commit_free-5" + ] + } + }, + "git_commit_id": { + "type": "function", + "file": "commit.h", + "line": 78, + "lineto": 78, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "const git_oid *", + "comment": " object identity for the commit." + }, + "description": "

Get the id of a commit.

\n", + "comments": "", + "group": "commit", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_commit_id-12" + ], + "log.c": [ + "ex/HEAD/log.html#git_commit_id-6" + ] + } + }, + "git_commit_owner": { + "type": "function", + "file": "commit.h", + "line": 86, + "lineto": 86, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "A previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "git_repository *", + "comment": " Repository that contains this commit." + }, + "description": "

Get the repository that contains the commit.

\n", + "comments": "", + "group": "commit", + "examples": { + "log.c": [ + "ex/HEAD/log.html#git_commit_owner-7", + "ex/HEAD/log.html#git_commit_owner-8" + ] + } + }, + "git_commit_message_encoding": { + "type": "function", + "file": "commit.h", + "line": 98, + "lineto": 98, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "const char *", + "comment": " NULL, or the encoding" + }, + "description": "

Get the encoding for the message of a commit,\n as a string representing a standard encoding name.

\n", + "comments": "

The encoding may be NULL if the encoding header in the commit is missing; in that case UTF-8 is assumed.

\n", + "group": "commit" + }, + "git_commit_message": { + "type": "function", + "file": "commit.h", + "line": 109, + "lineto": 109, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "const char *", + "comment": " the message of a commit" + }, + "description": "

Get the full message of a commit.

\n", + "comments": "

The returned message will be slightly prettified by removing any potential leading newlines.

\n", + "group": "commit", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_commit_message-3", + "ex/HEAD/cat-file.html#git_commit_message-4" + ], + "general.c": [ + "ex/HEAD/general.html#git_commit_message-13", + "ex/HEAD/general.html#git_commit_message-14", + "ex/HEAD/general.html#git_commit_message-15" + ], + "log.c": [ + "ex/HEAD/log.html#git_commit_message-9", + "ex/HEAD/log.html#git_commit_message-10" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_commit_message-2" + ] + } + }, + "git_commit_message_raw": { + "type": "function", + "file": "commit.h", + "line": 117, + "lineto": 117, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "const char *", + "comment": " the raw message of a commit" + }, + "description": "

Get the full raw message of a commit.

\n", + "comments": "", + "group": "commit" + }, + "git_commit_summary": { + "type": "function", + "file": "commit.h", + "line": 128, + "lineto": 128, + "args": [ + { + "name": "commit", + "type": "git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "git_commit *commit", + "sig": "git_commit *", + "return": { + "type": "const char *", + "comment": " the summary of a commit or NULL on error" + }, + "description": "

Get the short "summary" of the git commit message.

\n", + "comments": "

The returned message is the summary of the commit, comprising the first paragraph of the message with whitespace trimmed and squashed.

\n", + "group": "commit" + }, + "git_commit_body": { + "type": "function", + "file": "commit.h", + "line": 141, + "lineto": 141, + "args": [ + { + "name": "commit", + "type": "git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "git_commit *commit", + "sig": "git_commit *", + "return": { + "type": "const char *", + "comment": " the body of a commit or NULL when no the message only\n consists of a summary" + }, + "description": "

Get the long "body" of the git commit message.

\n", + "comments": "

The returned message is the body of the commit, comprising everything but the first paragraph of the message. Leading and trailing whitespaces are trimmed.

\n", + "group": "commit" + }, + "git_commit_time": { + "type": "function", + "file": "commit.h", + "line": 149, + "lineto": 149, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "git_time_t", + "comment": " the time of a commit" + }, + "description": "

Get the commit time (i.e. committer time) of a commit.

\n", + "comments": "", + "group": "commit", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_commit_time-16", + "ex/HEAD/general.html#git_commit_time-17" + ] + } + }, + "git_commit_time_offset": { + "type": "function", + "file": "commit.h", + "line": 157, + "lineto": 157, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "int", + "comment": " positive or negative timezone offset, in minutes from UTC" + }, + "description": "

Get the commit timezone offset (i.e. committer's preferred timezone) of a commit.

\n", + "comments": "", + "group": "commit" + }, + "git_commit_committer": { + "type": "function", + "file": "commit.h", + "line": 165, + "lineto": 165, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "const git_signature *", + "comment": " the committer of a commit" + }, + "description": "

Get the committer of a commit.

\n", + "comments": "", + "group": "commit", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_commit_committer-5" + ], + "general.c": [ + "ex/HEAD/general.html#git_commit_committer-18" + ], + "log.c": [ + "ex/HEAD/log.html#git_commit_committer-11" + ] + } + }, + "git_commit_author": { + "type": "function", + "file": "commit.h", + "line": 173, + "lineto": 173, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "const git_signature *", + "comment": " the author of a commit" + }, + "description": "

Get the author of a commit.

\n", + "comments": "", + "group": "commit", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_commit_author-6" + ], + "general.c": [ + "ex/HEAD/general.html#git_commit_author-19", + "ex/HEAD/general.html#git_commit_author-20" + ], + "log.c": [ + "ex/HEAD/log.html#git_commit_author-12", + "ex/HEAD/log.html#git_commit_author-13" + ] + } + }, + "git_commit_raw_header": { + "type": "function", + "file": "commit.h", + "line": 181, + "lineto": 181, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit" + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "const char *", + "comment": " the header text of the commit" + }, + "description": "

Get the full raw text of the commit header.

\n", + "comments": "", + "group": "commit" + }, + "git_commit_tree": { + "type": "function", + "file": "commit.h", + "line": 190, + "lineto": 190, + "args": [ + { + "name": "tree_out", + "type": "git_tree **", + "comment": "pointer where to store the tree object" + }, + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "git_tree **tree_out, const git_commit *commit", + "sig": "git_tree **::const git_commit *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the tree pointed to by a commit.

\n", + "comments": "", + "group": "commit", + "examples": { + "log.c": [ + "ex/HEAD/log.html#git_commit_tree-14", + "ex/HEAD/log.html#git_commit_tree-15", + "ex/HEAD/log.html#git_commit_tree-16", + "ex/HEAD/log.html#git_commit_tree-17", + "ex/HEAD/log.html#git_commit_tree-18" + ] + } + }, + "git_commit_tree_id": { + "type": "function", + "file": "commit.h", + "line": 200, + "lineto": 200, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "const git_oid *", + "comment": " the id of tree pointed to by commit." + }, + "description": "

Get the id of the tree pointed to by a commit. This differs from\n git_commit_tree in that no attempts are made to fetch an object\n from the ODB.

\n", + "comments": "", + "group": "commit", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_commit_tree_id-7" + ] + } + }, + "git_commit_parentcount": { + "type": "function", + "file": "commit.h", + "line": 208, + "lineto": 208, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + } + ], + "argline": "const git_commit *commit", + "sig": "const git_commit *", + "return": { + "type": "unsigned int", + "comment": " integer of count of parents" + }, + "description": "

Get the number of parents of this commit

\n", + "comments": "", + "group": "commit", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_commit_parentcount-8" + ], + "general.c": [ + "ex/HEAD/general.html#git_commit_parentcount-21" + ], + "log.c": [ + "ex/HEAD/log.html#git_commit_parentcount-19", + "ex/HEAD/log.html#git_commit_parentcount-20" + ] + } + }, + "git_commit_parent": { + "type": "function", + "file": "commit.h", + "line": 218, + "lineto": 221, + "args": [ + { + "name": "out", + "type": "git_commit **", + "comment": "Pointer where to store the parent commit" + }, + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + }, + { + "name": "n", + "type": "unsigned int", + "comment": "the position of the parent (from 0 to `parentcount`)" + } + ], + "argline": "git_commit **out, const git_commit *commit, unsigned int n", + "sig": "git_commit **::const git_commit *::unsigned int", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the specified parent of the commit.

\n", + "comments": "", + "group": "commit", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_commit_parent-22" + ], + "log.c": [ + "ex/HEAD/log.html#git_commit_parent-21", + "ex/HEAD/log.html#git_commit_parent-22" + ] + } + }, + "git_commit_parent_id": { + "type": "function", + "file": "commit.h", + "line": 232, + "lineto": 234, + "args": [ + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + }, + { + "name": "n", + "type": "unsigned int", + "comment": "the position of the parent (from 0 to `parentcount`)" + } + ], + "argline": "const git_commit *commit, unsigned int n", + "sig": "const git_commit *::unsigned int", + "return": { + "type": "const git_oid *", + "comment": " the id of the parent, NULL on error." + }, + "description": "

Get the oid of a specified parent for a commit. This is different from\n git_commit_parent, which will attempt to load the parent commit from\n the ODB.

\n", + "comments": "", + "group": "commit", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_commit_parent_id-9" + ], + "log.c": [ + "ex/HEAD/log.html#git_commit_parent_id-23" + ] + } + }, + "git_commit_nth_gen_ancestor": { + "type": "function", + "file": "commit.h", + "line": 250, + "lineto": 253, + "args": [ + { + "name": "ancestor", + "type": "git_commit **", + "comment": "Pointer where to store the ancestor commit" + }, + { + "name": "commit", + "type": "const git_commit *", + "comment": "a previously loaded commit." + }, + { + "name": "n", + "type": "unsigned int", + "comment": "the requested generation" + } + ], + "argline": "git_commit **ancestor, const git_commit *commit, unsigned int n", + "sig": "git_commit **::const git_commit *::unsigned int", + "return": { + "type": "int", + "comment": " 0 on success; GIT_ENOTFOUND if no matching ancestor exists\n or an error code" + }, + "description": "

Get the commit object that is the \n<n

\n\n
\n

th generation ancestor\n of the named commit object, following only the first parents.\n The returned commit has to be freed by the caller.

\n
\n", + "comments": "

Passing 0 as the generation number returns another instance of the base commit itself.

\n", + "group": "commit" + }, + "git_commit_header_field": { + "type": "function", + "file": "commit.h", + "line": 264, + "lineto": 264, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "the buffer to fill" + }, + { + "name": "commit", + "type": "const git_commit *", + "comment": "the commit to look in" + }, + { + "name": "field", + "type": "const char *", + "comment": "the header field to return" + } + ], + "argline": "git_buf *out, const git_commit *commit, const char *field", + "sig": "git_buf *::const git_commit *::const char *", + "return": { + "type": "int", + "comment": " 0 on succeess, GIT_ENOTFOUND if the field does not exist,\n or an error code" + }, + "description": "

Get an arbitrary header field

\n", + "comments": "", + "group": "commit" + }, + "git_commit_extract_signature": { + "type": "function", + "file": "commit.h", + "line": 282, + "lineto": 282, + "args": [ + { + "name": "signature", + "type": "git_buf *", + "comment": "the signature block" + }, + { + "name": "signed_data", + "type": "git_buf *", + "comment": "signed data; this is the commit contents minus the signature block" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which the commit exists" + }, + { + "name": "commit_id", + "type": "git_oid *", + "comment": "the commit from which to extract the data" + }, + { + "name": "field", + "type": "const char *", + "comment": "the name of the header field containing the signature\n block; pass `NULL` to extract the default 'gpgsig'" + } + ], + "argline": "git_buf *signature, git_buf *signed_data, git_repository *repo, git_oid *commit_id, const char *field", + "sig": "git_buf *::git_buf *::git_repository *::git_oid *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if the id is not for a commit\n or the commit does not have a signature." + }, + "description": "

Extract the signature from a commit

\n", + "comments": "

If the id is not for a commit, the error class will be GITERR_INVALID. If the commit does not have a signature, the error class will be GITERR_OBJECT.

\n", + "group": "commit" + }, + "git_commit_create": { + "type": "function", + "file": "commit.h", + "line": 328, + "lineto": 338, + "args": [ + { + "name": "id", + "type": "git_oid *", + "comment": "Pointer in which to store the OID of the newly created commit" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to store the commit" + }, + { + "name": "update_ref", + "type": "const char *", + "comment": "If not NULL, name of the reference that\n\twill be updated to point to this commit. If the reference\n\tis not direct, it will be resolved to a direct reference.\n\tUse \"HEAD\" to update the HEAD of the current branch and\n\tmake it point to this commit. If the reference doesn't\n\texist yet, it will be created. If it does exist, the first\n\tparent must be the tip of this branch." + }, + { + "name": "author", + "type": "const git_signature *", + "comment": "Signature with author and author time of commit" + }, + { + "name": "committer", + "type": "const git_signature *", + "comment": "Signature with committer and * commit time of commit" + }, + { + "name": "message_encoding", + "type": "const char *", + "comment": "The encoding for the message in the\n commit, represented with a standard encoding name.\n E.g. \"UTF-8\". If NULL, no encoding header is written and\n UTF-8 is assumed." + }, + { + "name": "message", + "type": "const char *", + "comment": "Full message for this commit" + }, + { + "name": "tree", + "type": "const git_tree *", + "comment": "An instance of a `git_tree` object that will\n be used as the tree for the commit. This tree object must\n also be owned by the given `repo`." + }, + { + "name": "parent_count", + "type": "size_t", + "comment": "Number of parents for this commit" + }, + { + "name": "parents", + "type": "const git_commit *[]", + "comment": "Array of `parent_count` pointers to `git_commit`\n objects that will be used as the parents for this commit. 
This\n array may be NULL if `parent_count` is 0 (root commit). All the\n given commits must be owned by the `repo`." + } + ], + "argline": "git_oid *id, git_repository *repo, const char *update_ref, const git_signature *author, const git_signature *committer, const char *message_encoding, const char *message, const git_tree *tree, size_t parent_count, const git_commit *[] parents", + "sig": "git_oid *::git_repository *::const char *::const git_signature *::const git_signature *::const char *::const char *::const git_tree *::size_t::const git_commit *[]", + "return": { + "type": "int", + "comment": " 0 or an error code\n\tThe created commit will be written to the Object Database and\n\tthe given reference will be updated to point to it" + }, + "description": "

Create new commit in the repository from a list of git_object pointers

\n", + "comments": "

The message will not be cleaned up automatically. You can do that with the git_message_prettify() function.

\n", + "group": "commit" + }, + "git_commit_create_v": { + "type": "function", + "file": "commit.h", + "line": 354, + "lineto": 364, + "args": [ + { + "name": "id", + "type": "git_oid *", + "comment": null + }, + { + "name": "repo", + "type": "git_repository *", + "comment": null + }, + { + "name": "update_ref", + "type": "const char *", + "comment": null + }, + { + "name": "author", + "type": "const git_signature *", + "comment": null + }, + { + "name": "committer", + "type": "const git_signature *", + "comment": null + }, + { + "name": "message_encoding", + "type": "const char *", + "comment": null + }, + { + "name": "message", + "type": "const char *", + "comment": null + }, + { + "name": "tree", + "type": "const git_tree *", + "comment": null + }, + { + "name": "parent_count", + "type": "size_t", + "comment": null + } + ], + "argline": "git_oid *id, git_repository *repo, const char *update_ref, const git_signature *author, const git_signature *committer, const char *message_encoding, const char *message, const git_tree *tree, size_t parent_count", + "sig": "git_oid *::git_repository *::const char *::const git_signature *::const git_signature *::const char *::const char *::const git_tree *::size_t", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create new commit in the repository using a variable argument list.

\n", + "comments": "

The message will not be cleaned up automatically. You can do that with the git_message_prettify() function.

\n\n

The parents for the commit are specified as a variable list of pointers to const git_commit *. Note that this is a convenience method which may not be safe to export for certain languages or compilers

\n\n

All other parameters remain the same as git_commit_create().

\n", + "group": "commit", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_commit_create_v-23" + ], + "init.c": [ + "ex/HEAD/init.html#git_commit_create_v-1" + ] + } + }, + "git_commit_amend": { + "type": "function", + "file": "commit.h", + "line": 387, + "lineto": 395, + "args": [ + { + "name": "id", + "type": "git_oid *", + "comment": null + }, + { + "name": "commit_to_amend", + "type": "const git_commit *", + "comment": null + }, + { + "name": "update_ref", + "type": "const char *", + "comment": null + }, + { + "name": "author", + "type": "const git_signature *", + "comment": null + }, + { + "name": "committer", + "type": "const git_signature *", + "comment": null + }, + { + "name": "message_encoding", + "type": "const char *", + "comment": null + }, + { + "name": "message", + "type": "const char *", + "comment": null + }, + { + "name": "tree", + "type": "const git_tree *", + "comment": null + } + ], + "argline": "git_oid *id, const git_commit *commit_to_amend, const char *update_ref, const git_signature *author, const git_signature *committer, const char *message_encoding, const char *message, const git_tree *tree", + "sig": "git_oid *::const git_commit *::const char *::const git_signature *::const git_signature *::const char *::const char *::const git_tree *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Amend an existing commit by replacing only non-NULL values.

\n", + "comments": "

This creates a new commit that is exactly the same as the old commit, except that any non-NULL values will be updated. The new commit has the same parents as the old commit.

\n\n

The update_ref value works as in the regular git_commit_create(), updating the ref to point to the newly rewritten commit. If you want to amend a commit that is not currently the tip of the branch and then rewrite the following commits to reach a ref, pass this as NULL and update the rest of the commit chain and ref separately.

\n\n

Unlike git_commit_create(), the author, committer, message, message_encoding, and tree parameters can be NULL in which case this will use the values from the original commit_to_amend.

\n\n

All parameters have the same meanings as in git_commit_create().

\n", + "group": "commit" + }, + "git_commit_create_buffer": { + "type": "function", + "file": "commit.h", + "line": 432, + "lineto": 441, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "the buffer into which to write the commit object content" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where the referenced tree and parents live" + }, + { + "name": "author", + "type": "const git_signature *", + "comment": "Signature with author and author time of commit" + }, + { + "name": "committer", + "type": "const git_signature *", + "comment": "Signature with committer and * commit time of commit" + }, + { + "name": "message_encoding", + "type": "const char *", + "comment": "The encoding for the message in the\n commit, represented with a standard encoding name.\n E.g. \"UTF-8\". If NULL, no encoding header is written and\n UTF-8 is assumed." + }, + { + "name": "message", + "type": "const char *", + "comment": "Full message for this commit" + }, + { + "name": "tree", + "type": "const git_tree *", + "comment": "An instance of a `git_tree` object that will\n be used as the tree for the commit. This tree object must\n also be owned by the given `repo`." + }, + { + "name": "parent_count", + "type": "size_t", + "comment": "Number of parents for this commit" + }, + { + "name": "parents", + "type": "const git_commit *[]", + "comment": "Array of `parent_count` pointers to `git_commit`\n objects that will be used as the parents for this commit. This\n array may be NULL if `parent_count` is 0 (root commit). All the\n given commits must be owned by the `repo`." 
+ } + ], + "argline": "git_buf *out, git_repository *repo, const git_signature *author, const git_signature *committer, const char *message_encoding, const char *message, const git_tree *tree, size_t parent_count, const git_commit *[] parents", + "sig": "git_buf *::git_repository *::const git_signature *::const git_signature *::const char *::const char *::const git_tree *::size_t::const git_commit *[]", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a commit and write it into a buffer

\n", + "comments": "

Create a commit as with git_commit_create() but instead of writing it to the objectdb, write the contents of the object into a buffer.

\n", + "group": "commit" + }, + "git_commit_create_with_signature": { + "type": "function", + "file": "commit.h", + "line": 457, + "lineto": 462, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "the resulting commit id" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": null + }, + { + "name": "commit_content", + "type": "const char *", + "comment": "the content of the unsigned commit object" + }, + { + "name": "signature", + "type": "const char *", + "comment": "the signature to add to the commit" + }, + { + "name": "signature_field", + "type": "const char *", + "comment": "which header field should contain this\n signature. Leave `NULL` for the default of \"gpgsig\"" + } + ], + "argline": "git_oid *out, git_repository *repo, const char *commit_content, const char *signature, const char *signature_field", + "sig": "git_oid *::git_repository *::const char *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a commit object from the given buffer and signature

\n", + "comments": "

Given the unsigned commit object's contents, its signature and the header field in which to store the signature, attach the signature to the commit and write it into the given repository.

\n", + "group": "commit" + }, + "git_commit_dup": { + "type": "function", + "file": "commit.h", + "line": 471, + "lineto": 471, + "args": [ + { + "name": "out", + "type": "git_commit **", + "comment": "Pointer to store the copy of the commit" + }, + { + "name": "source", + "type": "git_commit *", + "comment": "Original commit to copy" + } + ], + "argline": "git_commit **out, git_commit *source", + "sig": "git_commit **::git_commit *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create an in-memory copy of a commit. The copy must be explicitly\n free'd or it will leak.

\n", + "comments": "", + "group": "commit" + }, + "git_libgit2_version": { + "type": "function", + "file": "common.h", + "line": 105, + "lineto": 105, + "args": [ + { + "name": "major", + "type": "int *", + "comment": "Store the major version number" + }, + { + "name": "minor", + "type": "int *", + "comment": "Store the minor version number" + }, + { + "name": "rev", + "type": "int *", + "comment": "Store the revision (patch) number" + } + ], + "argline": "int *major, int *minor, int *rev", + "sig": "int *::int *::int *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Return the version of the libgit2 library\n being currently used.

\n", + "comments": "", + "group": "libgit2" + }, + "git_libgit2_features": { + "type": "function", + "file": "common.h", + "line": 136, + "lineto": 136, + "args": [], + "argline": "", + "sig": "", + "return": { + "type": "int", + "comment": " A combination of GIT_FEATURE_* values." + }, + "description": "

Query compile time options for libgit2.

\n", + "comments": "
    \n
  • GIT_FEATURE_THREADS Libgit2 was compiled with thread support. Note that thread support is still to be seen as a 'work in progress' - basic object lookups are believed to be threadsafe, but other operations may not be.

  • \n
  • GIT_FEATURE_HTTPS Libgit2 supports the https:// protocol. This requires the openssl library to be found when compiling libgit2.

  • \n
  • GIT_FEATURE_SSH Libgit2 supports the SSH protocol for network operations. This requires the libssh2 library to be found when compiling libgit2

  • \n
\n", + "group": "libgit2" + }, + "git_libgit2_opts": { + "type": "function", + "file": "common.h", + "line": 282, + "lineto": 282, + "args": [ + { + "name": "option", + "type": "int", + "comment": "Option key" + } + ], + "argline": "int option", + "sig": "int", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on failure" + }, + "description": "

Set or query a library global option

\n", + "comments": "

Available options:

\n\n
* opts(GIT_OPT_GET_MWINDOW_SIZE, size_t *):\n\n    > Get the maximum mmap window size\n\n* opts(GIT_OPT_SET_MWINDOW_SIZE, size_t):\n\n    > Set the maximum mmap window size\n\n* opts(GIT_OPT_GET_MWINDOW_MAPPED_LIMIT, size_t *):\n\n    > Get the maximum memory that will be mapped in total by the library\n\n* opts(GIT_OPT_SET_MWINDOW_MAPPED_LIMIT, size_t):\n\n    >Set the maximum amount of memory that can be mapped at any time        by the library\n\n* opts(GIT_OPT_GET_SEARCH_PATH, int level, git_buf *buf)\n\n    > Get the search path for a given level of config data.  "level" must       > be one of `GIT_CONFIG_LEVEL_SYSTEM`, `GIT_CONFIG_LEVEL_GLOBAL`,       > `GIT_CONFIG_LEVEL_XDG`, or `GIT_CONFIG_LEVEL_PROGRAMDATA`.        > The search path is written to the `out` buffer.\n\n* opts(GIT_OPT_SET_SEARCH_PATH, int level, const char *path)\n\n    > Set the search path for a level of config data.  The search path      > applied to shared attributes and ignore files, too.       >       > - `path` lists directories delimited by GIT_PATH_LIST_SEPARATOR.      >   Pass NULL to reset to the default (generally based on environment       >   variables).  Use magic path `$PATH` to include the old value        >   of the path (if you want to prepend or append, for instance).       >       > - `level` must be `GIT_CONFIG_LEVEL_SYSTEM`,      >   `GIT_CONFIG_LEVEL_GLOBAL`, `GIT_CONFIG_LEVEL_XDG`, or       >   `GIT_CONFIG_LEVEL_PROGRAMDATA`.\n\n* opts(GIT_OPT_SET_CACHE_OBJECT_LIMIT, git_otype type, size_t size)\n\n    > Set the maximum data size for the given type of object to be      > considered eligible for caching in memory.  Setting to value to       > zero means that that type of object will not be cached.       > Defaults to 0 for GIT_OBJ_BLOB (i.e. 
won't cache blobs) and 4k        > for GIT_OBJ_COMMIT, GIT_OBJ_TREE, and GIT_OBJ_TAG.\n\n* opts(GIT_OPT_SET_CACHE_MAX_SIZE, ssize_t max_storage_bytes)\n\n    > Set the maximum total data size that will be cached in memory     > across all repositories before libgit2 starts evicting objects        > from the cache.  This is a soft limit, in that the library might      > briefly exceed it, but will start aggressively evicting objects       > from cache when that happens.  The default cache size is 256MB.\n\n* opts(GIT_OPT_ENABLE_CACHING, int enabled)\n\n    > Enable or disable caching completely.     >       > Because caches are repository-specific, disabling the cache       > cannot immediately clear all cached objects, but each cache will      > be cleared on the next attempt to update anything in it.\n\n* opts(GIT_OPT_GET_CACHED_MEMORY, ssize_t *current, ssize_t *allowed)\n\n    > Get the current bytes in cache and the maximum that would be      > allowed in the cache.\n\n* opts(GIT_OPT_GET_TEMPLATE_PATH, git_buf *out)\n\n    > Get the default template path.        > The path is written to the `out` buffer.\n\n* opts(GIT_OPT_SET_TEMPLATE_PATH, const char *path)\n\n    > Set the default template path.        >       > - `path` directory of template.\n\n* opts(GIT_OPT_SET_SSL_CERT_LOCATIONS, const char *file, const char *path)\n\n    > Set the SSL certificate-authority locations.      >       > - `file` is the location of a file containing several     >   certificates concatenated together.     > - `path` is the location of a directory holding several       >   certificates, one per file.     >       > Either parameter may be `NULL`, but not both.\n\n* opts(GIT_OPT_SET_USER_AGENT, const char *user_agent)\n\n    > Set the value of the User-Agent header.  This value will be       > appended to "git/1.0", for compatibility with other git clients.      
>       > - `user_agent` is the value that will be delivered as the     >   User-Agent header on HTTP requests.\n\n* opts(GIT_OPT_ENABLE_STRICT_OBJECT_CREATION, int enabled)\n\n    > Enable strict input validation when creating new objects      > to ensure that all inputs to the new objects are valid.  For      > example, when this is enabled, the parent(s) and tree inputs      > will be validated when creating a new commit.  This defaults      > to disabled.  * opts(GIT_OPT_SET_SSL_CIPHERS, const char *ciphers)\n\n    > Set the SSL ciphers use for HTTPS connections.        >       > - `ciphers` is the list of ciphers that are eanbled.\n
\n", + "group": "libgit2" + }, + "git_config_entry_free": { + "type": "function", + "file": "config.h", + "line": 75, + "lineto": 75, + "args": [ + { + "name": "", + "type": "git_config_entry *", + "comment": null + } + ], + "argline": "git_config_entry *", + "sig": "git_config_entry *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a config entry

\n", + "comments": "", + "group": "config" + }, + "git_config_find_global": { + "type": "function", + "file": "config.h", + "line": 116, + "lineto": 116, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "Pointer to a user-allocated git_buf in which to store the path" + } + ], + "argline": "git_buf *out", + "sig": "git_buf *", + "return": { + "type": "int", + "comment": " 0 if a global configuration file has been found. Its path will be stored in `out`." + }, + "description": "

Locate the path to the global configuration file

\n", + "comments": "

The user or global configuration file is usually located in $HOME/.gitconfig.

\n\n

This method will try to guess the full path to that file, if the file exists. The returned path may be used on any git_config call to load the global configuration file.

\n\n

This method will not guess the path to the xdg compatible config file (.config/git/config).

\n", + "group": "config" + }, + "git_config_find_xdg": { + "type": "function", + "file": "config.h", + "line": 133, + "lineto": 133, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "Pointer to a user-allocated git_buf in which to store the path" + } + ], + "argline": "git_buf *out", + "sig": "git_buf *", + "return": { + "type": "int", + "comment": " 0 if a xdg compatible configuration file has been\n\tfound. Its path will be stored in `out`." + }, + "description": "

Locate the path to the global xdg compatible configuration file

\n", + "comments": "

The xdg compatible configuration file is usually located in $HOME/.config/git/config.

\n\n

This method will try to guess the full path to that file, if the file exists. The returned path may be used on any git_config call to load the xdg compatible configuration file.

\n", + "group": "config" + }, + "git_config_find_system": { + "type": "function", + "file": "config.h", + "line": 145, + "lineto": 145, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "Pointer to a user-allocated git_buf in which to store the path" + } + ], + "argline": "git_buf *out", + "sig": "git_buf *", + "return": { + "type": "int", + "comment": " 0 if a system configuration file has been\n\tfound. Its path will be stored in `out`." + }, + "description": "

Locate the path to the system configuration file

\n", + "comments": "

If /etc/gitconfig doesn't exist, it will look for %PROGRAMFILES%.

\n", + "group": "config" + }, + "git_config_find_programdata": { + "type": "function", + "file": "config.h", + "line": 156, + "lineto": 156, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "Pointer to a user-allocated git_buf in which to store the path" + } + ], + "argline": "git_buf *out", + "sig": "git_buf *", + "return": { + "type": "int", + "comment": " 0 if a ProgramData configuration file has been\n\tfound. Its path will be stored in `out`." + }, + "description": "

Locate the path to the configuration file in ProgramData

\n", + "comments": "

Look for the file in %PROGRAMDATA% used by portable git.

\n", + "group": "config" + }, + "git_config_open_default": { + "type": "function", + "file": "config.h", + "line": 168, + "lineto": 168, + "args": [ + { + "name": "out", + "type": "git_config **", + "comment": "Pointer to store the config instance" + } + ], + "argline": "git_config **out", + "sig": "git_config **", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Open the global, XDG and system configuration files

\n", + "comments": "

Utility wrapper that finds the global, XDG and system configuration files and opens them into a single prioritized config object that can be used when accessing default config data outside a repository.

\n", + "group": "config" + }, + "git_config_new": { + "type": "function", + "file": "config.h", + "line": 179, + "lineto": 179, + "args": [ + { + "name": "out", + "type": "git_config **", + "comment": "pointer to the new configuration" + } + ], + "argline": "git_config **out", + "sig": "git_config **", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Allocate a new configuration object

\n", + "comments": "

This object is empty, so you have to add a file to it before you can do anything with it.

\n", + "group": "config" + }, + "git_config_add_file_ondisk": { + "type": "function", + "file": "config.h", + "line": 206, + "lineto": 210, + "args": [ + { + "name": "cfg", + "type": "git_config *", + "comment": "the configuration to add the file to" + }, + { + "name": "path", + "type": "const char *", + "comment": "path to the configuration file to add" + }, + { + "name": "level", + "type": "git_config_level_t", + "comment": "the priority level of the backend" + }, + { + "name": "force", + "type": "int", + "comment": "replace config file at the given priority level" + } + ], + "argline": "git_config *cfg, const char *path, git_config_level_t level, int force", + "sig": "git_config *::const char *::git_config_level_t::int", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EEXISTS when adding more than one file\n for a given priority level (and force_replace set to 0),\n GIT_ENOTFOUND when the file doesn't exist or error code" + }, + "description": "

Add an on-disk config file instance to an existing config

\n", + "comments": "

The on-disk file pointed at by path will be opened and parsed; it's expected to be a native Git config file following the default Git config syntax (see man git-config).

\n\n

If the file does not exist, the file will still be added and it will be created the first time we write to it.

\n\n

Note that the configuration object will free the file automatically.

\n\n

Further queries on this config object will access each of the config file instances in order (instances with a higher priority level will be accessed first).

\n", + "group": "config" + }, + "git_config_open_ondisk": { + "type": "function", + "file": "config.h", + "line": 224, + "lineto": 224, + "args": [ + { + "name": "out", + "type": "git_config **", + "comment": "The configuration instance to create" + }, + { + "name": "path", + "type": "const char *", + "comment": "Path to the on-disk file to open" + } + ], + "argline": "git_config **out, const char *path", + "sig": "git_config **::const char *", + "return": { + "type": "int", + "comment": " 0 on success, or an error code" + }, + "description": "

Create a new config instance containing a single on-disk file

\n", + "comments": "

This method is a simple utility wrapper for the following sequence of calls: - git_config_new - git_config_add_file_ondisk

\n", + "group": "config", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_config_open_ondisk-24" + ] + } + }, + "git_config_open_level": { + "type": "function", + "file": "config.h", + "line": 242, + "lineto": 245, + "args": [ + { + "name": "out", + "type": "git_config **", + "comment": "The configuration instance to create" + }, + { + "name": "parent", + "type": "const git_config *", + "comment": "Multi-level config to search for the given level" + }, + { + "name": "level", + "type": "git_config_level_t", + "comment": "Configuration level to search for" + } + ], + "argline": "git_config **out, const git_config *parent, git_config_level_t level", + "sig": "git_config **::const git_config *::git_config_level_t", + "return": { + "type": "int", + "comment": " 0, GIT_ENOTFOUND if the passed level cannot be found in the\n multi-level parent config, or an error code" + }, + "description": "

Build a single-level focused config object from a multi-level one.

\n", + "comments": "

The returned config object can be used to perform get/set/delete operations on a single specific level.

\n\n

Getting several times the same level from the same parent multi-level config will return different config instances, but containing the same config_file instance.

\n", + "group": "config" + }, + "git_config_open_global": { + "type": "function", + "file": "config.h", + "line": 259, + "lineto": 259, + "args": [ + { + "name": "out", + "type": "git_config **", + "comment": "pointer in which to store the config object" + }, + { + "name": "config", + "type": "git_config *", + "comment": "the config object in which to look" + } + ], + "argline": "git_config **out, git_config *config", + "sig": "git_config **::git_config *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Open the global/XDG configuration file according to git's rules

\n", + "comments": "

Git allows you to store your global configuration at $HOME/.config or $XDG_CONFIG_HOME/git/config. For backwards compatability, the XDG file shouldn't be used unless the use has created it explicitly. With this function you'll open the correct one to write to.

\n", + "group": "config" + }, + "git_config_snapshot": { + "type": "function", + "file": "config.h", + "line": 275, + "lineto": 275, + "args": [ + { + "name": "out", + "type": "git_config **", + "comment": "pointer in which to store the snapshot config object" + }, + { + "name": "config", + "type": "git_config *", + "comment": "configuration to snapshot" + } + ], + "argline": "git_config **out, git_config *config", + "sig": "git_config **::git_config *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a snapshot of the configuration

\n", + "comments": "

Create a snapshot of the current state of a configuration, which allows you to look into a consistent view of the configuration for looking up complex values (e.g. a remote, submodule).

\n\n

The string returned when querying such a config object is valid until it is freed.

\n", + "group": "config" + }, + "git_config_free": { + "type": "function", + "file": "config.h", + "line": 282, + "lineto": 282, + "args": [ + { + "name": "cfg", + "type": "git_config *", + "comment": "the configuration to free" + } + ], + "argline": "git_config *cfg", + "sig": "git_config *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free the configuration and its associated memory and files

\n", + "comments": "", + "group": "config" + }, + "git_config_get_entry": { + "type": "function", + "file": "config.h", + "line": 294, + "lineto": 297, + "args": [ + { + "name": "out", + "type": "git_config_entry **", + "comment": "pointer to the variable git_config_entry" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + } + ], + "argline": "git_config_entry **out, const git_config *cfg, const char *name", + "sig": "git_config_entry **::const git_config *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the git_config_entry of a config variable.

\n", + "comments": "

Free the git_config_entry after use with git_config_entry_free().

\n", + "group": "config" + }, + "git_config_get_int32": { + "type": "function", + "file": "config.h", + "line": 311, + "lineto": 311, + "args": [ + { + "name": "out", + "type": "int32_t *", + "comment": "pointer to the variable where the value should be stored" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + } + ], + "argline": "int32_t *out, const git_config *cfg, const char *name", + "sig": "int32_t *::const git_config *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the value of an integer config variable.

\n", + "comments": "

All config files will be looked into, in the order of their defined level. A higher level means a higher priority. The first occurrence of the variable will be returned here.

\n", + "group": "config", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_config_get_int32-25" + ] + } + }, + "git_config_get_int64": { + "type": "function", + "file": "config.h", + "line": 325, + "lineto": 325, + "args": [ + { + "name": "out", + "type": "int64_t *", + "comment": "pointer to the variable where the value should be stored" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + } + ], + "argline": "int64_t *out, const git_config *cfg, const char *name", + "sig": "int64_t *::const git_config *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the value of a long integer config variable.

\n", + "comments": "

All config files will be looked into, in the order of their defined level. A higher level means a higher priority. The first occurrence of the variable will be returned here.

\n", + "group": "config" + }, + "git_config_get_bool": { + "type": "function", + "file": "config.h", + "line": 342, + "lineto": 342, + "args": [ + { + "name": "out", + "type": "int *", + "comment": "pointer to the variable where the value should be stored" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + } + ], + "argline": "int *out, const git_config *cfg, const char *name", + "sig": "int *::const git_config *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the value of a boolean config variable.

\n", + "comments": "

This function uses the usual C convention of 0 being false and anything else true.

\n\n

All config files will be looked into, in the order of their defined level. A higher level means a higher priority. The first occurrence of the variable will be returned here.

\n", + "group": "config" + }, + "git_config_get_path": { + "type": "function", + "file": "config.h", + "line": 360, + "lineto": 360, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "the buffer in which to store the result" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + } + ], + "argline": "git_buf *out, const git_config *cfg, const char *name", + "sig": "git_buf *::const git_config *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the value of a path config variable.

\n", + "comments": "

A leading '~' will be expanded to the global search path (which defaults to the user's home directory but can be overridden via git_libgit2_opts().

\n\n

All config files will be looked into, in the order of their defined level. A higher level means a higher priority. The first occurrence of the variable will be returned here.

\n", + "group": "config" + }, + "git_config_get_string": { + "type": "function", + "file": "config.h", + "line": 378, + "lineto": 378, + "args": [ + { + "name": "out", + "type": "const char **", + "comment": "pointer to the string" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + } + ], + "argline": "const char **out, const git_config *cfg, const char *name", + "sig": "const char **::const git_config *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the value of a string config variable.

\n", + "comments": "

This function can only be used on snapshot config objects. The string is owned by the config and should not be freed by the user. The pointer will be valid until the config is freed.

\n\n

All config files will be looked into, in the order of their defined level. A higher level means a higher priority. The first occurrence of the variable will be returned here.

\n", + "group": "config", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_config_get_string-26" + ] + } + }, + "git_config_get_string_buf": { + "type": "function", + "file": "config.h", + "line": 394, + "lineto": 394, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "buffer in which to store the string" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + } + ], + "argline": "git_buf *out, const git_config *cfg, const char *name", + "sig": "git_buf *::const git_config *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the value of a string config variable.

\n", + "comments": "

The value of the config will be copied into the buffer.

\n\n

All config files will be looked into, in the order of their defined level. A higher level means a higher priority. The first occurrence of the variable will be returned here.

\n", + "group": "config" + }, + "git_config_get_multivar_foreach": { + "type": "function", + "file": "config.h", + "line": 408, + "lineto": 408, + "args": [ + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + }, + { + "name": "regexp", + "type": "const char *", + "comment": "regular expression to filter which variables we're\n interested in. Use NULL to indicate all" + }, + { + "name": "callback", + "type": "git_config_foreach_cb", + "comment": "the function to be called on each value of the variable" + }, + { + "name": "payload", + "type": "void *", + "comment": "opaque pointer to pass to the callback" + } + ], + "argline": "const git_config *cfg, const char *name, const char *regexp, git_config_foreach_cb callback, void *payload", + "sig": "const git_config *::const char *::const char *::git_config_foreach_cb::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Get each value of a multivar in a foreach callback

\n", + "comments": "

The callback will be called on each variable found

\n", + "group": "config" + }, + "git_config_multivar_iterator_new": { + "type": "function", + "file": "config.h", + "line": 419, + "lineto": 419, + "args": [ + { + "name": "out", + "type": "git_config_iterator **", + "comment": "pointer to store the iterator" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + }, + { + "name": "regexp", + "type": "const char *", + "comment": "regular expression to filter which variables we're\n interested in. Use NULL to indicate all" + } + ], + "argline": "git_config_iterator **out, const git_config *cfg, const char *name, const char *regexp", + "sig": "git_config_iterator **::const git_config *::const char *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Get each value of a multivar

\n", + "comments": "", + "group": "config" + }, + "git_config_next": { + "type": "function", + "file": "config.h", + "line": 431, + "lineto": 431, + "args": [ + { + "name": "entry", + "type": "git_config_entry **", + "comment": "pointer to store the entry" + }, + { + "name": "iter", + "type": "git_config_iterator *", + "comment": "the iterator" + } + ], + "argline": "git_config_entry **entry, git_config_iterator *iter", + "sig": "git_config_entry **::git_config_iterator *", + "return": { + "type": "int", + "comment": " 0 or an error code. GIT_ITEROVER if the iteration has completed" + }, + "description": "

Return the current entry and advance the iterator

\n", + "comments": "

The pointers returned by this function are valid until the iterator is freed.

\n", + "group": "config" + }, + "git_config_iterator_free": { + "type": "function", + "file": "config.h", + "line": 438, + "lineto": 438, + "args": [ + { + "name": "iter", + "type": "git_config_iterator *", + "comment": "the iterator to free" + } + ], + "argline": "git_config_iterator *iter", + "sig": "git_config_iterator *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a config iterator

\n", + "comments": "", + "group": "config" + }, + "git_config_set_int32": { + "type": "function", + "file": "config.h", + "line": 449, + "lineto": 449, + "args": [ + { + "name": "cfg", + "type": "git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + }, + { + "name": "value", + "type": "int32_t", + "comment": "Integer value for the variable" + } + ], + "argline": "git_config *cfg, const char *name, int32_t value", + "sig": "git_config *::const char *::int32_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Set the value of an integer config variable in the config file\n with the highest level (usually the local one).

\n", + "comments": "", + "group": "config" + }, + "git_config_set_int64": { + "type": "function", + "file": "config.h", + "line": 460, + "lineto": 460, + "args": [ + { + "name": "cfg", + "type": "git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + }, + { + "name": "value", + "type": "int64_t", + "comment": "Long integer value for the variable" + } + ], + "argline": "git_config *cfg, const char *name, int64_t value", + "sig": "git_config *::const char *::int64_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Set the value of a long integer config variable in the config file\n with the highest level (usually the local one).

\n", + "comments": "", + "group": "config" + }, + "git_config_set_bool": { + "type": "function", + "file": "config.h", + "line": 471, + "lineto": 471, + "args": [ + { + "name": "cfg", + "type": "git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + }, + { + "name": "value", + "type": "int", + "comment": "the value to store" + } + ], + "argline": "git_config *cfg, const char *name, int value", + "sig": "git_config *::const char *::int", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Set the value of a boolean config variable in the config file\n with the highest level (usually the local one).

\n", + "comments": "", + "group": "config" + }, + "git_config_set_string": { + "type": "function", + "file": "config.h", + "line": 485, + "lineto": 485, + "args": [ + { + "name": "cfg", + "type": "git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + }, + { + "name": "value", + "type": "const char *", + "comment": "the string to store." + } + ], + "argline": "git_config *cfg, const char *name, const char *value", + "sig": "git_config *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Set the value of a string config variable in the config file\n with the highest level (usually the local one).

\n", + "comments": "

A copy of the string is made and the user is free to use it afterwards.

\n", + "group": "config" + }, + "git_config_set_multivar": { + "type": "function", + "file": "config.h", + "line": 495, + "lineto": 495, + "args": [ + { + "name": "cfg", + "type": "git_config *", + "comment": "where to look for the variable" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + }, + { + "name": "regexp", + "type": "const char *", + "comment": "a regular expression to indicate which values to replace" + }, + { + "name": "value", + "type": "const char *", + "comment": "the new value." + } + ], + "argline": "git_config *cfg, const char *name, const char *regexp, const char *value", + "sig": "git_config *::const char *::const char *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Set a multivar in the local config file.

\n", + "comments": "", + "group": "config" + }, + "git_config_delete_entry": { + "type": "function", + "file": "config.h", + "line": 504, + "lineto": 504, + "args": [ + { + "name": "cfg", + "type": "git_config *", + "comment": "the configuration" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable to delete" + } + ], + "argline": "git_config *cfg, const char *name", + "sig": "git_config *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Delete a config variable from the config file\n with the highest level (usually the local one).

\n", + "comments": "", + "group": "config" + }, + "git_config_delete_multivar": { + "type": "function", + "file": "config.h", + "line": 515, + "lineto": 515, + "args": [ + { + "name": "cfg", + "type": "git_config *", + "comment": "where to look for the variables" + }, + { + "name": "name", + "type": "const char *", + "comment": "the variable's name" + }, + { + "name": "regexp", + "type": "const char *", + "comment": "a regular expression to indicate which values to delete" + } + ], + "argline": "git_config *cfg, const char *name, const char *regexp", + "sig": "git_config *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Deletes one or several entries from a multivar in the local config file.

\n", + "comments": "", + "group": "config" + }, + "git_config_foreach": { + "type": "function", + "file": "config.h", + "line": 533, + "lineto": 536, + "args": [ + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to get the variables from" + }, + { + "name": "callback", + "type": "git_config_foreach_cb", + "comment": "the function to call on each variable" + }, + { + "name": "payload", + "type": "void *", + "comment": "the data to pass to the callback" + } + ], + "argline": "const git_config *cfg, git_config_foreach_cb callback, void *payload", + "sig": "const git_config *::git_config_foreach_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Perform an operation on each config variable.

\n", + "comments": "

The callback receives the normalized name and value of each variable in the config backend, and the data pointer passed to this function. If the callback returns a non-zero value, the function stops iterating and returns that value to the caller.

\n\n

The pointers passed to the callback are only valid as long as the iteration is ongoing.

\n", + "group": "config" + }, + "git_config_iterator_new": { + "type": "function", + "file": "config.h", + "line": 547, + "lineto": 547, + "args": [ + { + "name": "out", + "type": "git_config_iterator **", + "comment": "pointer to store the iterator" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to ge the variables from" + } + ], + "argline": "git_config_iterator **out, const git_config *cfg", + "sig": "git_config_iterator **::const git_config *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Iterate over all the config variables

\n", + "comments": "

Use git_config_next to advance the iteration and git_config_iterator_free when done.

\n", + "group": "config" + }, + "git_config_iterator_glob_new": { + "type": "function", + "file": "config.h", + "line": 559, + "lineto": 559, + "args": [ + { + "name": "out", + "type": "git_config_iterator **", + "comment": "pointer to store the iterator" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to ge the variables from" + }, + { + "name": "regexp", + "type": "const char *", + "comment": "regular expression to match the names" + } + ], + "argline": "git_config_iterator **out, const git_config *cfg, const char *regexp", + "sig": "git_config_iterator **::const git_config *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Iterate over all the config variables whose name matches a pattern

\n", + "comments": "

Use git_config_next to advance the iteration and git_config_iterator_free when done.

\n", + "group": "config" + }, + "git_config_foreach_match": { + "type": "function", + "file": "config.h", + "line": 577, + "lineto": 581, + "args": [ + { + "name": "cfg", + "type": "const git_config *", + "comment": "where to get the variables from" + }, + { + "name": "regexp", + "type": "const char *", + "comment": "regular expression to match against config names" + }, + { + "name": "callback", + "type": "git_config_foreach_cb", + "comment": "the function to call on each variable" + }, + { + "name": "payload", + "type": "void *", + "comment": "the data to pass to the callback" + } + ], + "argline": "const git_config *cfg, const char *regexp, git_config_foreach_cb callback, void *payload", + "sig": "const git_config *::const char *::git_config_foreach_cb::void *", + "return": { + "type": "int", + "comment": " 0 or the return value of the callback which didn't return 0" + }, + "description": "

Perform an operation on each config variable matching a regular expression.

\n", + "comments": "

This behaves like git_config_foreach with an additional filter of a regular expression that filters which config keys are passed to the callback.

\n\n

The pointers passed to the callback are only valid as long as the iteration is ongoing.

\n", + "group": "config" + }, + "git_config_get_mapped": { + "type": "function", + "file": "config.h", + "line": 617, + "lineto": 622, + "args": [ + { + "name": "out", + "type": "int *", + "comment": "place to store the result of the mapping" + }, + { + "name": "cfg", + "type": "const git_config *", + "comment": "config file to get the variables from" + }, + { + "name": "name", + "type": "const char *", + "comment": "name of the config variable to lookup" + }, + { + "name": "maps", + "type": "const git_cvar_map *", + "comment": "array of `git_cvar_map` objects specifying the possible mappings" + }, + { + "name": "map_n", + "type": "size_t", + "comment": "number of mapping objects in `maps`" + } + ], + "argline": "int *out, const git_config *cfg, const char *name, const git_cvar_map *maps, size_t map_n", + "sig": "int *::const git_config *::const char *::const git_cvar_map *::size_t", + "return": { + "type": "int", + "comment": " 0 on success, error code otherwise" + }, + "description": "

Query the value of a config variable and return it mapped to\n an integer constant.

\n", + "comments": "

This is a helper method to easily map different possible values to a variable to integer constants that easily identify them.

\n\n

A mapping array looks as follows:

\n\n
git_cvar_map autocrlf_mapping[] = {     {GIT_CVAR_FALSE, NULL, GIT_AUTO_CRLF_FALSE},        {GIT_CVAR_TRUE, NULL, GIT_AUTO_CRLF_TRUE},      {GIT_CVAR_STRING, "input", GIT_AUTO_CRLF_INPUT},        {GIT_CVAR_STRING, "default", GIT_AUTO_CRLF_DEFAULT}};\n
\n\n

On any "false" value for the variable (e.g. "false", "FALSE", "no"), the mapping will store GIT_AUTO_CRLF_FALSE in the out parameter.

\n\n

The same thing applies for any "true" value such as "true", "yes" or "1", storing the GIT_AUTO_CRLF_TRUE variable.

\n\n

Otherwise, if the value matches the string "input" (with case insensitive comparison), the given constant will be stored in out, and likewise for "default".

\n\n

If not a single match can be made to store in out, an error code will be returned.

\n", + "group": "config" + }, + "git_config_lookup_map_value": { + "type": "function", + "file": "config.h", + "line": 632, + "lineto": 636, + "args": [ + { + "name": "out", + "type": "int *", + "comment": "place to store the result of the parsing" + }, + { + "name": "maps", + "type": "const git_cvar_map *", + "comment": "array of `git_cvar_map` objects specifying the possible mappings" + }, + { + "name": "map_n", + "type": "size_t", + "comment": "number of mapping objects in `maps`" + }, + { + "name": "value", + "type": "const char *", + "comment": "value to parse" + } + ], + "argline": "int *out, const git_cvar_map *maps, size_t map_n, const char *value", + "sig": "int *::const git_cvar_map *::size_t::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Maps a string value to an integer constant

\n", + "comments": "", + "group": "config" + }, + "git_config_parse_bool": { + "type": "function", + "file": "config.h", + "line": 648, + "lineto": 648, + "args": [ + { + "name": "out", + "type": "int *", + "comment": "place to store the result of the parsing" + }, + { + "name": "value", + "type": "const char *", + "comment": "value to parse" + } + ], + "argline": "int *out, const char *value", + "sig": "int *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Parse a string value as a bool.

\n", + "comments": "

Valid values for true are: 'true', 'yes', 'on', 1 or any number different from 0 Valid values for false are: 'false', 'no', 'off', 0

\n", + "group": "config" + }, + "git_config_parse_int32": { + "type": "function", + "file": "config.h", + "line": 660, + "lineto": 660, + "args": [ + { + "name": "out", + "type": "int32_t *", + "comment": "place to store the result of the parsing" + }, + { + "name": "value", + "type": "const char *", + "comment": "value to parse" + } + ], + "argline": "int32_t *out, const char *value", + "sig": "int32_t *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Parse a string value as an int32.

\n", + "comments": "

An optional value suffix of 'k', 'm', or 'g' will cause the value to be multiplied by 1024, 1048576, or 1073741824 prior to output.

\n", + "group": "config" + }, + "git_config_parse_int64": { + "type": "function", + "file": "config.h", + "line": 672, + "lineto": 672, + "args": [ + { + "name": "out", + "type": "int64_t *", + "comment": "place to store the result of the parsing" + }, + { + "name": "value", + "type": "const char *", + "comment": "value to parse" + } + ], + "argline": "int64_t *out, const char *value", + "sig": "int64_t *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Parse a string value as an int64.

\n", + "comments": "

An optional value suffix of 'k', 'm', or 'g' will cause the value to be multiplied by 1024, 1048576, or 1073741824 prior to output.

\n", + "group": "config" + }, + "git_config_parse_path": { + "type": "function", + "file": "config.h", + "line": 687, + "lineto": 687, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "placae to store the result of parsing" + }, + { + "name": "value", + "type": "const char *", + "comment": "the path to evaluate" + } + ], + "argline": "git_buf *out, const char *value", + "sig": "git_buf *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Parse a string value as a path.

\n", + "comments": "

A leading '~' will be expanded to the global search path (which defaults to the user's home directory but can be overridden via git_libgit2_opts().

\n\n

If the value does not begin with a tilde, the input will be returned.

\n", + "group": "config" + }, + "git_config_backend_foreach_match": { + "type": "function", + "file": "config.h", + "line": 701, + "lineto": 705, + "args": [ + { + "name": "backend", + "type": "git_config_backend *", + "comment": "where to get the variables from" + }, + { + "name": "regexp", + "type": "const char *", + "comment": "regular expression to match against config names (can be NULL)" + }, + { + "name": "callback", + "type": "git_config_foreach_cb", + "comment": "the function to call on each variable" + }, + { + "name": "payload", + "type": "void *", + "comment": "the data to pass to the callback" + } + ], + "argline": "git_config_backend *backend, const char *regexp, git_config_foreach_cb callback, void *payload", + "sig": "git_config_backend *::const char *::git_config_foreach_cb::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Perform an operation on each config variable in given config backend\n matching a regular expression.

\n", + "comments": "

This behaves like git_config_foreach_match except that instead of all config entries it just enumerates through the given backend's entries.

\n", + "group": "config" + }, + "git_config_lock": { + "type": "function", + "file": "config.h", + "line": 724, + "lineto": 724, + "args": [ + { + "name": "tx", + "type": "git_transaction **", + "comment": "the resulting transaction, use this to commit or undo the\n changes" + }, + { + "name": "cfg", + "type": "git_config *", + "comment": "the configuration in which to lock" + } + ], + "argline": "git_transaction **tx, git_config *cfg", + "sig": "git_transaction **::git_config *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lock the backend with the highest priority

\n", + "comments": "

Locking disallows anybody else from writing to that backend. Any updates made after locking will not be visible to a reader until the file is unlocked.

\n\n

You can apply the changes by calling git_transaction_commit() before freeing the transaction. Either of these actions will unlock the config.

\n", + "group": "config" + }, + "git_cred_userpass": { + "type": "function", + "file": "cred_helpers.h", + "line": 43, + "lineto": 48, + "args": [ + { + "name": "cred", + "type": "git_cred **", + "comment": "The newly created credential object." + }, + { + "name": "url", + "type": "const char *", + "comment": "The resource for which we are demanding a credential." + }, + { + "name": "user_from_url", + "type": "const char *", + "comment": "The username that was embedded in a \"user\n@\nhost\"\n remote url, or NULL if not included." + }, + { + "name": "allowed_types", + "type": "unsigned int", + "comment": "A bitmask stating which cred types are OK to return." + }, + { + "name": "payload", + "type": "void *", + "comment": "The payload provided when specifying this callback. (This is\n interpreted as a `git_cred_userpass_payload*`.)" + } + ], + "argline": "git_cred **cred, const char *url, const char *user_from_url, unsigned int allowed_types, void *payload", + "sig": "git_cred **::const char *::const char *::unsigned int::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Stock callback usable as a git_cred_acquire_cb. This calls\n git_cred_userpass_plaintext_new unless the protocol has not specified\n GIT_CREDTYPE_USERPASS_PLAINTEXT as an allowed type.

\n", + "comments": "", + "group": "cred" + }, + "git_describe_commit": { + "type": "function", + "file": "describe.h", + "line": 120, + "lineto": 123, + "args": [ + { + "name": "result", + "type": "git_describe_result **", + "comment": "pointer to store the result. You must free this once\n you're done with it." + }, + { + "name": "committish", + "type": "git_object *", + "comment": "a committish to describe" + }, + { + "name": "opts", + "type": "git_describe_options *", + "comment": "the lookup options" + } + ], + "argline": "git_describe_result **result, git_object *committish, git_describe_options *opts", + "sig": "git_describe_result **::git_object *::git_describe_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Describe a commit

\n", + "comments": "

Perform the describe operation on the given committish object.

\n", + "group": "describe", + "examples": { + "describe.c": [ + "ex/HEAD/describe.html#git_describe_commit-1" + ] + } + }, + "git_describe_workdir": { + "type": "function", + "file": "describe.h", + "line": 137, + "lineto": 140, + "args": [ + { + "name": "out", + "type": "git_describe_result **", + "comment": "pointer to store the result. You must free this once\n you're done with it." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to perform the describe" + }, + { + "name": "opts", + "type": "git_describe_options *", + "comment": "the lookup options" + } + ], + "argline": "git_describe_result **out, git_repository *repo, git_describe_options *opts", + "sig": "git_describe_result **::git_repository *::git_describe_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Describe a commit

\n", + "comments": "

Perform the describe operation on the current commit and the worktree. After performing describe on HEAD, a status is run and the description is considered to be dirty if there are any changes.

\n", + "group": "describe", + "examples": { + "describe.c": [ + "ex/HEAD/describe.html#git_describe_workdir-2" + ] + } + }, + "git_describe_format": { + "type": "function", + "file": "describe.h", + "line": 150, + "lineto": 153, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "The buffer to store the result" + }, + { + "name": "result", + "type": "const git_describe_result *", + "comment": "the result from `git_describe_commit()` or\n `git_describe_workdir()`." + }, + { + "name": "opts", + "type": "const git_describe_format_options *", + "comment": "the formatting options" + } + ], + "argline": "git_buf *out, const git_describe_result *result, const git_describe_format_options *opts", + "sig": "git_buf *::const git_describe_result *::const git_describe_format_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Print the describe result to a buffer

\n", + "comments": "", + "group": "describe", + "examples": { + "describe.c": [ + "ex/HEAD/describe.html#git_describe_format-3" + ] + } + }, + "git_describe_result_free": { + "type": "function", + "file": "describe.h", + "line": 158, + "lineto": 158, + "args": [ + { + "name": "result", + "type": "git_describe_result *", + "comment": null + } + ], + "argline": "git_describe_result *result", + "sig": "git_describe_result *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free the describe result.

\n", + "comments": "", + "group": "describe" + }, + "git_diff_init_options": { + "type": "function", + "file": "diff.h", + "line": 435, + "lineto": 437, + "args": [ + { + "name": "opts", + "type": "git_diff_options *", + "comment": "The `git_diff_options` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_DIFF_OPTIONS_VERSION`" + } + ], + "argline": "git_diff_options *opts, unsigned int version", + "sig": "git_diff_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_diff_options with default values. Equivalent to\n creating an instance with GIT_DIFF_OPTIONS_INIT.

\n", + "comments": "", + "group": "diff" + }, + "git_diff_find_init_options": { + "type": "function", + "file": "diff.h", + "line": 720, + "lineto": 722, + "args": [ + { + "name": "opts", + "type": "git_diff_find_options *", + "comment": "The `git_diff_find_options` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_DIFF_FIND_OPTIONS_VERSION`" + } + ], + "argline": "git_diff_find_options *opts, unsigned int version", + "sig": "git_diff_find_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_diff_find_options with default values. Equivalent to\n creating an instance with GIT_DIFF_FIND_OPTIONS_INIT.

\n", + "comments": "", + "group": "diff" + }, + "git_diff_free": { + "type": "function", + "file": "diff.h", + "line": 736, + "lineto": 736, + "args": [ + { + "name": "diff", + "type": "git_diff *", + "comment": "The previously created diff; cannot be used after free." + } + ], + "argline": "git_diff *diff", + "sig": "git_diff *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Deallocate a diff.

\n", + "comments": "", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_free-2" + ], + "log.c": [ + "ex/HEAD/log.html#git_diff_free-24", + "ex/HEAD/log.html#git_diff_free-25" + ] + } + }, + "git_diff_tree_to_tree": { + "type": "function", + "file": "diff.h", + "line": 754, + "lineto": 759, + "args": [ + { + "name": "diff", + "type": "git_diff **", + "comment": "Output pointer to a git_diff pointer to be allocated." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository containing the trees." + }, + { + "name": "old_tree", + "type": "git_tree *", + "comment": "A git_tree object to diff from, or NULL for empty tree." + }, + { + "name": "new_tree", + "type": "git_tree *", + "comment": "A git_tree object to diff to, or NULL for empty tree." + }, + { + "name": "opts", + "type": "const git_diff_options *", + "comment": "Structure with options to influence diff or NULL for defaults." + } + ], + "argline": "git_diff **diff, git_repository *repo, git_tree *old_tree, git_tree *new_tree, const git_diff_options *opts", + "sig": "git_diff **::git_repository *::git_tree *::git_tree *::const git_diff_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a diff with the difference between two tree objects.

\n", + "comments": "

This is equivalent to git diff <old-tree> <new-tree>

\n\n

The first tree will be used for the "old_file" side of the delta and the second tree will be used for the "new_file" side of the delta. You can pass NULL to indicate an empty tree, although it is an error to pass NULL for both the old_tree and new_tree.

\n", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_tree_to_tree-3" + ], + "log.c": [ + "ex/HEAD/log.html#git_diff_tree_to_tree-26", + "ex/HEAD/log.html#git_diff_tree_to_tree-27" + ] + } + }, + "git_diff_tree_to_index": { + "type": "function", + "file": "diff.h", + "line": 780, + "lineto": 785, + "args": [ + { + "name": "diff", + "type": "git_diff **", + "comment": "Output pointer to a git_diff pointer to be allocated." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository containing the tree and index." + }, + { + "name": "old_tree", + "type": "git_tree *", + "comment": "A git_tree object to diff from, or NULL for empty tree." + }, + { + "name": "index", + "type": "git_index *", + "comment": "The index to diff with; repo index used if NULL." + }, + { + "name": "opts", + "type": "const git_diff_options *", + "comment": "Structure with options to influence diff or NULL for defaults." + } + ], + "argline": "git_diff **diff, git_repository *repo, git_tree *old_tree, git_index *index, const git_diff_options *opts", + "sig": "git_diff **::git_repository *::git_tree *::git_index *::const git_diff_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a diff between a tree and repository index.

\n", + "comments": "

This is equivalent to git diff --cached <treeish> or if you pass the HEAD tree, then like git diff --cached.

\n\n

The tree you pass will be used for the "old_file" side of the delta, and the index will be used for the "new_file" side of the delta.

\n\n

If you pass NULL for the index, then the existing index of the repo will be used. In this case, the index will be refreshed from disk (if it has changed) before the diff is generated.

\n", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_tree_to_index-4" + ] + } + }, + "git_diff_index_to_workdir": { + "type": "function", + "file": "diff.h", + "line": 807, + "lineto": 811, + "args": [ + { + "name": "diff", + "type": "git_diff **", + "comment": "Output pointer to a git_diff pointer to be allocated." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository." + }, + { + "name": "index", + "type": "git_index *", + "comment": "The index to diff from; repo index used if NULL." + }, + { + "name": "opts", + "type": "const git_diff_options *", + "comment": "Structure with options to influence diff or NULL for defaults." + } + ], + "argline": "git_diff **diff, git_repository *repo, git_index *index, const git_diff_options *opts", + "sig": "git_diff **::git_repository *::git_index *::const git_diff_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a diff between the repository index and the workdir directory.

\n", + "comments": "

This matches the git diff command. See the note below on git_diff_tree_to_workdir for a discussion of the difference between git diff and git diff HEAD and how to emulate a git diff <treeish> using libgit2.

\n\n

The index will be used for the "old_file" side of the delta, and the working directory will be used for the "new_file" side of the delta.

\n\n

If you pass NULL for the index, then the existing index of the repo will be used. In this case, the index will be refreshed from disk (if it has changed) before the diff is generated.

\n", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_index_to_workdir-5" + ] + } + }, + "git_diff_tree_to_workdir": { + "type": "function", + "file": "diff.h", + "line": 836, + "lineto": 840, + "args": [ + { + "name": "diff", + "type": "git_diff **", + "comment": "A pointer to a git_diff pointer that will be allocated." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository containing the tree." + }, + { + "name": "old_tree", + "type": "git_tree *", + "comment": "A git_tree object to diff from, or NULL for empty tree." + }, + { + "name": "opts", + "type": "const git_diff_options *", + "comment": "Structure with options to influence diff or NULL for defaults." + } + ], + "argline": "git_diff **diff, git_repository *repo, git_tree *old_tree, const git_diff_options *opts", + "sig": "git_diff **::git_repository *::git_tree *::const git_diff_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a diff between a tree and the working directory.

\n", + "comments": "

The tree you provide will be used for the "old_file" side of the delta, and the working directory will be used for the "new_file" side.

\n\n

This is not the same as git diff <treeish> or git diff-index <treeish>. Those commands use information from the index, whereas this function strictly returns the differences between the tree and the files in the working directory, regardless of the state of the index. Use git_diff_tree_to_workdir_with_index to emulate those commands.

\n\n

To see difference between this and git_diff_tree_to_workdir_with_index, consider the example of a staged file deletion where the file has then been put back into the working dir and further modified. The tree-to-workdir diff for that file is 'modified', but git diff would show status 'deleted' since there is a staged delete.

\n", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_tree_to_workdir-6" + ] + } + }, + "git_diff_tree_to_workdir_with_index": { + "type": "function", + "file": "diff.h", + "line": 855, + "lineto": 859, + "args": [ + { + "name": "diff", + "type": "git_diff **", + "comment": "A pointer to a git_diff pointer that will be allocated." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository containing the tree." + }, + { + "name": "old_tree", + "type": "git_tree *", + "comment": "A git_tree object to diff from, or NULL for empty tree." + }, + { + "name": "opts", + "type": "const git_diff_options *", + "comment": "Structure with options to influence diff or NULL for defaults." + } + ], + "argline": "git_diff **diff, git_repository *repo, git_tree *old_tree, const git_diff_options *opts", + "sig": "git_diff **::git_repository *::git_tree *::const git_diff_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a diff between a tree and the working directory using index data\n to account for staged deletes, tracked files, etc.

\n", + "comments": "

This emulates git diff <tree> by diffing the tree to the index and the index to the working directory and blending the results into a single diff that includes staged deleted, etc.

\n", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_tree_to_workdir_with_index-7" + ] + } + }, + "git_diff_index_to_index": { + "type": "function", + "file": "diff.h", + "line": 873, + "lineto": 878, + "args": [ + { + "name": "diff", + "type": "git_diff **", + "comment": "Output pointer to a git_diff pointer to be allocated." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository containing the indexes." + }, + { + "name": "old_index", + "type": "git_index *", + "comment": "A git_index object to diff from." + }, + { + "name": "new_index", + "type": "git_index *", + "comment": "A git_index object to diff to." + }, + { + "name": "opts", + "type": "const git_diff_options *", + "comment": "Structure with options to influence diff or NULL for defaults." + } + ], + "argline": "git_diff **diff, git_repository *repo, git_index *old_index, git_index *new_index, const git_diff_options *opts", + "sig": "git_diff **::git_repository *::git_index *::git_index *::const git_diff_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a diff with the difference between two index objects.

\n", + "comments": "

The first index will be used for the "old_file" side of the delta and the second index will be used for the "new_file" side of the delta.

\n", + "group": "diff" + }, + "git_diff_merge": { + "type": "function", + "file": "diff.h", + "line": 893, + "lineto": 895, + "args": [ + { + "name": "onto", + "type": "git_diff *", + "comment": "Diff to merge into." + }, + { + "name": "from", + "type": "const git_diff *", + "comment": "Diff to merge." + } + ], + "argline": "git_diff *onto, const git_diff *from", + "sig": "git_diff *::const git_diff *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Merge one diff into another.

\n", + "comments": "

This merges items from the "from" list into the "onto" list. The resulting diff will have all items that appear in either list. If an item appears in both lists, then it will be "merged" to appear as if the old version was from the "onto" list and the new version is from the "from" list (with the exception that if the item has a pending DELETE in the middle, then it will show as deleted).

\n", + "group": "diff" + }, + "git_diff_find_similar": { + "type": "function", + "file": "diff.h", + "line": 909, + "lineto": 911, + "args": [ + { + "name": "diff", + "type": "git_diff *", + "comment": "diff to run detection algorithms on" + }, + { + "name": "options", + "type": "const git_diff_find_options *", + "comment": "Control how detection should be run, NULL for defaults" + } + ], + "argline": "git_diff *diff, const git_diff_find_options *options", + "sig": "git_diff *::const git_diff_find_options *", + "return": { + "type": "int", + "comment": " 0 on success, -1 on failure" + }, + "description": "

Transform a diff marking file renames, copies, etc.

\n", + "comments": "

This modifies a diff in place, replacing old entries that look like renames or copies with new entries reflecting those changes. This also will, if requested, break modified files into add/remove pairs if the amount of change is above a threshold.

\n", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_find_similar-8" + ] + } + }, + "git_diff_num_deltas": { + "type": "function", + "file": "diff.h", + "line": 929, + "lineto": 929, + "args": [ + { + "name": "diff", + "type": "const git_diff *", + "comment": "A git_diff generated by one of the above functions" + } + ], + "argline": "const git_diff *diff", + "sig": "const git_diff *", + "return": { + "type": "size_t", + "comment": " Count of number of deltas in the list" + }, + "description": "

Query how many diff records are there in a diff.

\n", + "comments": "", + "group": "diff", + "examples": { + "log.c": [ + "ex/HEAD/log.html#git_diff_num_deltas-28" + ] + } + }, + "git_diff_num_deltas_of_type": { + "type": "function", + "file": "diff.h", + "line": 942, + "lineto": 943, + "args": [ + { + "name": "diff", + "type": "const git_diff *", + "comment": "A git_diff generated by one of the above functions" + }, + { + "name": "type", + "type": "git_delta_t", + "comment": "A git_delta_t value to filter the count" + } + ], + "argline": "const git_diff *diff, git_delta_t type", + "sig": "const git_diff *::git_delta_t", + "return": { + "type": "size_t", + "comment": " Count of number of deltas matching delta_t type" + }, + "description": "

Query how many diff deltas are there in a diff filtered by type.

\n", + "comments": "

This works just like git_diff_entrycount() with an extra parameter that is a git_delta_t and returns just the count of how many deltas match that particular type.

\n", + "group": "diff" + }, + "git_diff_get_delta": { + "type": "function", + "file": "diff.h", + "line": 962, + "lineto": 963, + "args": [ + { + "name": "diff", + "type": "const git_diff *", + "comment": "Diff list object" + }, + { + "name": "idx", + "type": "size_t", + "comment": "Index into diff list" + } + ], + "argline": "const git_diff *diff, size_t idx", + "sig": "const git_diff *::size_t", + "return": { + "type": "const git_diff_delta *", + "comment": " Pointer to git_diff_delta (or NULL if `idx` out of range)" + }, + "description": "

Return the diff delta for an entry in the diff list.

\n", + "comments": "

The git_diff_delta pointer points to internal data and you do not have to release it when you are done with it. It will go away when the * git_diff (or any associated git_patch) goes away.

\n\n

Note that the flags on the delta related to whether it has binary content or not may not be set if there are no attributes set for the file and there has been no reason to load the file data at this point. For now, if you need those flags to be up to date, your only option is to either use git_diff_foreach or create a git_patch.

\n", + "group": "diff" + }, + "git_diff_is_sorted_icase": { + "type": "function", + "file": "diff.h", + "line": 971, + "lineto": 971, + "args": [ + { + "name": "diff", + "type": "const git_diff *", + "comment": "diff to check" + } + ], + "argline": "const git_diff *diff", + "sig": "const git_diff *", + "return": { + "type": "int", + "comment": " 0 if case sensitive, 1 if case is ignored" + }, + "description": "

Check if deltas are sorted case sensitively or insensitively.

\n", + "comments": "", + "group": "diff" + }, + "git_diff_foreach": { + "type": "function", + "file": "diff.h", + "line": 999, + "lineto": 1005, + "args": [ + { + "name": "diff", + "type": "git_diff *", + "comment": "A git_diff generated by one of the above functions." + }, + { + "name": "file_cb", + "type": "git_diff_file_cb", + "comment": "Callback function to make per file in the diff." + }, + { + "name": "binary_cb", + "type": "git_diff_binary_cb", + "comment": "Optional callback to make for binary files." + }, + { + "name": "hunk_cb", + "type": "git_diff_hunk_cb", + "comment": "Optional callback to make per hunk of text diff. This\n callback is called to describe a range of lines in the\n diff. It will not be issued for binary files." + }, + { + "name": "line_cb", + "type": "git_diff_line_cb", + "comment": "Optional callback to make per line of diff text. This\n same callback will be made for context lines, added, and\n removed lines, and even for a deleted trailing newline." + }, + { + "name": "payload", + "type": "void *", + "comment": "Reference pointer that will be passed to your callbacks." + } + ], + "argline": "git_diff *diff, git_diff_file_cb file_cb, git_diff_binary_cb binary_cb, git_diff_hunk_cb hunk_cb, git_diff_line_cb line_cb, void *payload", + "sig": "git_diff *::git_diff_file_cb::git_diff_binary_cb::git_diff_hunk_cb::git_diff_line_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Loop over all deltas in a diff issuing callbacks.

\n", + "comments": "

This will iterate through all of the files described in a diff. You should provide a file callback to learn about each file.

\n\n

The "hunk" and "line" callbacks are optional, and the text diff of the files will only be calculated if they are not NULL. Of course, these callbacks will not be invoked for binary files on the diff or for files whose only change is a file mode change.

\n\n

Returning a non-zero value from any of the callbacks will terminate the iteration and return the value to the user.

\n", + "group": "diff" + }, + "git_diff_status_char": { + "type": "function", + "file": "diff.h", + "line": 1018, + "lineto": 1018, + "args": [ + { + "name": "status", + "type": "git_delta_t", + "comment": "The git_delta_t value to look up" + } + ], + "argline": "git_delta_t status", + "sig": "git_delta_t", + "return": { + "type": "char", + "comment": " The single character label for that code" + }, + "description": "

Look up the single character abbreviation for a delta status code.

\n", + "comments": "

When you run git diff --name-status it uses single letter codes in the output such as 'A' for added, 'D' for deleted, 'M' for modified, etc. This function converts a git_delta_t value into these letters for your own purposes. GIT_DELTA_UNTRACKED will return a space (i.e. ' ').

\n", + "group": "diff" + }, + "git_diff_print": { + "type": "function", + "file": "diff.h", + "line": 1043, + "lineto": 1047, + "args": [ + { + "name": "diff", + "type": "git_diff *", + "comment": "A git_diff generated by one of the above functions." + }, + { + "name": "format", + "type": "git_diff_format_t", + "comment": "A git_diff_format_t value to pick the text format." + }, + { + "name": "print_cb", + "type": "git_diff_line_cb", + "comment": "Callback to make per line of diff text." + }, + { + "name": "payload", + "type": "void *", + "comment": "Reference pointer that will be passed to your callback." + } + ], + "argline": "git_diff *diff, git_diff_format_t format, git_diff_line_cb print_cb, void *payload", + "sig": "git_diff *::git_diff_format_t::git_diff_line_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Iterate over a diff generating formatted text output.

\n", + "comments": "

Returning a non-zero value from the callbacks will terminate the iteration and return the non-zero value to the caller.

\n", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_print-9" + ], + "log.c": [ + "ex/HEAD/log.html#git_diff_print-29" + ] + } + }, + "git_diff_blobs": { + "type": "function", + "file": "diff.h", + "line": 1084, + "lineto": 1094, + "args": [ + { + "name": "old_blob", + "type": "const git_blob *", + "comment": "Blob for old side of diff, or NULL for empty blob" + }, + { + "name": "old_as_path", + "type": "const char *", + "comment": "Treat old blob as if it had this filename; can be NULL" + }, + { + "name": "new_blob", + "type": "const git_blob *", + "comment": "Blob for new side of diff, or NULL for empty blob" + }, + { + "name": "new_as_path", + "type": "const char *", + "comment": "Treat new blob as if it had this filename; can be NULL" + }, + { + "name": "options", + "type": "const git_diff_options *", + "comment": "Options for diff, or NULL for default options" + }, + { + "name": "file_cb", + "type": "git_diff_file_cb", + "comment": "Callback for \"file\"; made once if there is a diff; can be NULL" + }, + { + "name": "binary_cb", + "type": "git_diff_binary_cb", + "comment": "Callback for binary files; can be NULL" + }, + { + "name": "hunk_cb", + "type": "git_diff_hunk_cb", + "comment": "Callback for each hunk in diff; can be NULL" + }, + { + "name": "line_cb", + "type": "git_diff_line_cb", + "comment": "Callback for each line in diff; can be NULL" + }, + { + "name": "payload", + "type": "void *", + "comment": "Payload passed to each callback function" + } + ], + "argline": "const git_blob *old_blob, const char *old_as_path, const git_blob *new_blob, const char *new_as_path, const git_diff_options *options, git_diff_file_cb file_cb, git_diff_binary_cb binary_cb, git_diff_hunk_cb hunk_cb, git_diff_line_cb line_cb, void *payload", + "sig": "const git_blob *::const char *::const git_blob *::const char *::const git_diff_options *::git_diff_file_cb::git_diff_binary_cb::git_diff_hunk_cb::git_diff_line_cb::void *", + "return": { + "type": 
"int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Directly run a diff on two blobs.

\n", + "comments": "

Compared to a file, a blob lacks some contextual information. As such, the git_diff_file given to the callback will have some fake data; i.e. mode will be 0 and path will be NULL.

\n\n

NULL is allowed for either old_blob or new_blob and will be treated as an empty blob, with the oid set to NULL in the git_diff_file data. Passing NULL for both blobs is a noop; no callbacks will be made at all.

\n\n

We do run a binary content check on the blob content and if either blob looks like binary data, the git_diff_delta binary attribute will be set to 1 and no call to the hunk_cb nor line_cb will be made (unless you pass GIT_DIFF_FORCE_TEXT of course).

\n", + "group": "diff" + }, + "git_diff_blob_to_buffer": { + "type": "function", + "file": "diff.h", + "line": 1121, + "lineto": 1132, + "args": [ + { + "name": "old_blob", + "type": "const git_blob *", + "comment": "Blob for old side of diff, or NULL for empty blob" + }, + { + "name": "old_as_path", + "type": "const char *", + "comment": "Treat old blob as if it had this filename; can be NULL" + }, + { + "name": "buffer", + "type": "const char *", + "comment": "Raw data for new side of diff, or NULL for empty" + }, + { + "name": "buffer_len", + "type": "size_t", + "comment": "Length of raw data for new side of diff" + }, + { + "name": "buffer_as_path", + "type": "const char *", + "comment": "Treat buffer as if it had this filename; can be NULL" + }, + { + "name": "options", + "type": "const git_diff_options *", + "comment": "Options for diff, or NULL for default options" + }, + { + "name": "file_cb", + "type": "git_diff_file_cb", + "comment": "Callback for \"file\"; made once if there is a diff; can be NULL" + }, + { + "name": "binary_cb", + "type": "git_diff_binary_cb", + "comment": "Callback for binary files; can be NULL" + }, + { + "name": "hunk_cb", + "type": "git_diff_hunk_cb", + "comment": "Callback for each hunk in diff; can be NULL" + }, + { + "name": "line_cb", + "type": "git_diff_line_cb", + "comment": "Callback for each line in diff; can be NULL" + }, + { + "name": "payload", + "type": "void *", + "comment": "Payload passed to each callback function" + } + ], + "argline": "const git_blob *old_blob, const char *old_as_path, const char *buffer, size_t buffer_len, const char *buffer_as_path, const git_diff_options *options, git_diff_file_cb file_cb, git_diff_binary_cb binary_cb, git_diff_hunk_cb hunk_cb, git_diff_line_cb line_cb, void *payload", + "sig": "const git_blob *::const char *::const char *::size_t::const char *::const git_diff_options *::git_diff_file_cb::git_diff_binary_cb::git_diff_hunk_cb::git_diff_line_cb::void *", + "return": { + "type": 
"int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Directly run a diff between a blob and a buffer.

\n", + "comments": "

As with git_diff_blobs, comparing a blob and buffer lacks some context, so the git_diff_file parameters to the callbacks will be faked a la the rules for git_diff_blobs().

\n\n

Passing NULL for old_blob will be treated as an empty blob (i.e. the file_cb will be invoked with GIT_DELTA_ADDED and the diff will be the entire content of the buffer added). Passing NULL to the buffer will do the reverse, with GIT_DELTA_REMOVED and blob content removed.

\n", + "group": "diff" + }, + "git_diff_buffers": { + "type": "function", + "file": "diff.h", + "line": 1155, + "lineto": 1167, + "args": [ + { + "name": "old_buffer", + "type": "const void *", + "comment": "Raw data for old side of diff, or NULL for empty" + }, + { + "name": "old_len", + "type": "size_t", + "comment": "Length of the raw data for old side of the diff" + }, + { + "name": "old_as_path", + "type": "const char *", + "comment": "Treat old buffer as if it had this filename; can be NULL" + }, + { + "name": "new_buffer", + "type": "const void *", + "comment": "Raw data for new side of diff, or NULL for empty" + }, + { + "name": "new_len", + "type": "size_t", + "comment": "Length of raw data for new side of diff" + }, + { + "name": "new_as_path", + "type": "const char *", + "comment": "Treat buffer as if it had this filename; can be NULL" + }, + { + "name": "options", + "type": "const git_diff_options *", + "comment": "Options for diff, or NULL for default options" + }, + { + "name": "file_cb", + "type": "git_diff_file_cb", + "comment": "Callback for \"file\"; made once if there is a diff; can be NULL" + }, + { + "name": "binary_cb", + "type": "git_diff_binary_cb", + "comment": "Callback for binary files; can be NULL" + }, + { + "name": "hunk_cb", + "type": "git_diff_hunk_cb", + "comment": "Callback for each hunk in diff; can be NULL" + }, + { + "name": "line_cb", + "type": "git_diff_line_cb", + "comment": "Callback for each line in diff; can be NULL" + }, + { + "name": "payload", + "type": "void *", + "comment": "Payload passed to each callback function" + } + ], + "argline": "const void *old_buffer, size_t old_len, const char *old_as_path, const void *new_buffer, size_t new_len, const char *new_as_path, const git_diff_options *options, git_diff_file_cb file_cb, git_diff_binary_cb binary_cb, git_diff_hunk_cb hunk_cb, git_diff_line_cb line_cb, void *payload", + "sig": "const void *::size_t::const char *::const void *::size_t::const char *::const 
git_diff_options *::git_diff_file_cb::git_diff_binary_cb::git_diff_hunk_cb::git_diff_line_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Directly run a diff between two buffers.

\n", + "comments": "

Even more than with git_diff_blobs, comparing two buffers lacks context, so the git_diff_file parameters to the callbacks will be faked a la the rules for git_diff_blobs().

\n", + "group": "diff" + }, + "git_diff_get_stats": { + "type": "function", + "file": "diff.h", + "line": 1203, + "lineto": 1205, + "args": [ + { + "name": "out", + "type": "git_diff_stats **", + "comment": "Structure containing the diff statistics." + }, + { + "name": "diff", + "type": "git_diff *", + "comment": "A git_diff generated by one of the above functions." + } + ], + "argline": "git_diff_stats **out, git_diff *diff", + "sig": "git_diff_stats **::git_diff *", + "return": { + "type": "int", + "comment": " 0 on success; non-zero on error" + }, + "description": "

Accumulate diff statistics for all patches.

\n", + "comments": "", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_get_stats-10" + ] + } + }, + "git_diff_stats_files_changed": { + "type": "function", + "file": "diff.h", + "line": 1213, + "lineto": 1214, + "args": [ + { + "name": "stats", + "type": "const git_diff_stats *", + "comment": "A `git_diff_stats` generated by one of the above functions." + } + ], + "argline": "const git_diff_stats *stats", + "sig": "const git_diff_stats *", + "return": { + "type": "size_t", + "comment": " total number of files changed in the diff" + }, + "description": "

Get the total number of files changed in a diff

\n", + "comments": "", + "group": "diff" + }, + "git_diff_stats_insertions": { + "type": "function", + "file": "diff.h", + "line": 1222, + "lineto": 1223, + "args": [ + { + "name": "stats", + "type": "const git_diff_stats *", + "comment": "A `git_diff_stats` generated by one of the above functions." + } + ], + "argline": "const git_diff_stats *stats", + "sig": "const git_diff_stats *", + "return": { + "type": "size_t", + "comment": " total number of insertions in the diff" + }, + "description": "

Get the total number of insertions in a diff

\n", + "comments": "", + "group": "diff" + }, + "git_diff_stats_deletions": { + "type": "function", + "file": "diff.h", + "line": 1231, + "lineto": 1232, + "args": [ + { + "name": "stats", + "type": "const git_diff_stats *", + "comment": "A `git_diff_stats` generated by one of the above functions." + } + ], + "argline": "const git_diff_stats *stats", + "sig": "const git_diff_stats *", + "return": { + "type": "size_t", + "comment": " total number of deletions in the diff" + }, + "description": "

Get the total number of deletions in a diff

\n", + "comments": "", + "group": "diff" + }, + "git_diff_stats_to_buf": { + "type": "function", + "file": "diff.h", + "line": 1243, + "lineto": 1247, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "buffer to store the formatted diff statistics in." + }, + { + "name": "stats", + "type": "const git_diff_stats *", + "comment": "A `git_diff_stats` generated by one of the above functions." + }, + { + "name": "format", + "type": "git_diff_stats_format_t", + "comment": "Formatting option." + }, + { + "name": "width", + "type": "size_t", + "comment": "Target width for output (only affects GIT_DIFF_STATS_FULL)" + } + ], + "argline": "git_buf *out, const git_diff_stats *stats, git_diff_stats_format_t format, size_t width", + "sig": "git_buf *::const git_diff_stats *::git_diff_stats_format_t::size_t", + "return": { + "type": "int", + "comment": " 0 on success; non-zero on error" + }, + "description": "

Print diff statistics to a git_buf.

\n", + "comments": "", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_stats_to_buf-11" + ] + } + }, + "git_diff_stats_free": { + "type": "function", + "file": "diff.h", + "line": 1255, + "lineto": 1255, + "args": [ + { + "name": "stats", + "type": "git_diff_stats *", + "comment": "The previously created statistics object;\n cannot be used after free." + } + ], + "argline": "git_diff_stats *stats", + "sig": "git_diff_stats *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Deallocate a git_diff_stats.

\n", + "comments": "", + "group": "diff", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_diff_stats_free-12" + ] + } + }, + "git_diff_format_email": { + "type": "function", + "file": "diff.h", + "line": 1307, + "lineto": 1310, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "buffer to store the e-mail patch in" + }, + { + "name": "diff", + "type": "git_diff *", + "comment": "containing the commit" + }, + { + "name": "opts", + "type": "const git_diff_format_email_options *", + "comment": "structure with options to influence content and formatting." + } + ], + "argline": "git_buf *out, git_diff *diff, const git_diff_format_email_options *opts", + "sig": "git_buf *::git_diff *::const git_diff_format_email_options *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an e-mail ready patch from a diff.

\n", + "comments": "", + "group": "diff" + }, + "git_diff_commit_as_email": { + "type": "function", + "file": "diff.h", + "line": 1326, + "lineto": 1333, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "buffer to store the e-mail patch in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "containing the commit" + }, + { + "name": "commit", + "type": "git_commit *", + "comment": "pointer to up commit" + }, + { + "name": "patch_no", + "type": "size_t", + "comment": "patch number of the commit" + }, + { + "name": "total_patches", + "type": "size_t", + "comment": "total number of patches in the patch set" + }, + { + "name": "flags", + "type": "git_diff_format_email_flags_t", + "comment": "determines the formatting of the e-mail" + }, + { + "name": "diff_opts", + "type": "const git_diff_options *", + "comment": "structure with options to influence diff or NULL for defaults." + } + ], + "argline": "git_buf *out, git_repository *repo, git_commit *commit, size_t patch_no, size_t total_patches, git_diff_format_email_flags_t flags, const git_diff_options *diff_opts", + "sig": "git_buf *::git_repository *::git_commit *::size_t::size_t::git_diff_format_email_flags_t::const git_diff_options *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an e-mail ready patch for a commit.

\n", + "comments": "

Does not support creating patches for merge commits (yet).

\n", + "group": "diff" + }, + "git_diff_format_email_init_options": { + "type": "function", + "file": "diff.h", + "line": 1344, + "lineto": 1346, + "args": [ + { + "name": "opts", + "type": "git_diff_format_email_options *", + "comment": "The `git_diff_format_email_options` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_DIFF_FORMAT_EMAIL_OPTIONS_VERSION`" + } + ], + "argline": "git_diff_format_email_options *opts, unsigned int version", + "sig": "git_diff_format_email_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_diff_format_email_options with default values.

\n", + "comments": "

Equivalent to creating an instance with GIT_DIFF_FORMAT_EMAIL_OPTIONS_INIT.

\n", + "group": "diff" + }, + "giterr_last": { + "type": "function", + "file": "errors.h", + "line": 110, + "lineto": 110, + "args": [], + "argline": "", + "sig": "", + "return": { + "type": "const git_error *", + "comment": " A git_error object." + }, + "description": "

Return the last git_error object that was generated for the\n current thread or NULL if no error has occurred.

\n", + "comments": "", + "group": "giterr", + "examples": { + "general.c": [ + "ex/HEAD/general.html#giterr_last-27" + ], + "network/clone.c": [ + "ex/HEAD/network/clone.html#giterr_last-2" + ], + "network/git2.c": [ + "ex/HEAD/network/git2.html#giterr_last-1", + "ex/HEAD/network/git2.html#giterr_last-2" + ] + } + }, + "giterr_clear": { + "type": "function", + "file": "errors.h", + "line": 115, + "lineto": 115, + "args": [], + "argline": "", + "sig": "", + "return": { + "type": "void", + "comment": null + }, + "description": "

Clear the last library error that occurred for this thread.

\n", + "comments": "", + "group": "giterr" + }, + "giterr_set_str": { + "type": "function", + "file": "errors.h", + "line": 133, + "lineto": 133, + "args": [ + { + "name": "error_class", + "type": "int", + "comment": "One of the `git_error_t` enum above describing the\n general subsystem that is responsible for the error." + }, + { + "name": "string", + "type": "const char *", + "comment": "The formatted error message to keep" + } + ], + "argline": "int error_class, const char *string", + "sig": "int::const char *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Set the error message string for this thread.

\n", + "comments": "

This function is public so that custom ODB backends and the like can relay an error message through libgit2. Most regular users of libgit2 will never need to call this function -- actually, calling it in most circumstances (for example, calling from within a callback function) will just end up having the value overwritten by libgit2 internals.

\n\n

This error message is stored in thread-local storage and only applies to the particular thread that this libgit2 call is made from.

\n", + "group": "giterr" + }, + "giterr_set_oom": { + "type": "function", + "file": "errors.h", + "line": 144, + "lineto": 144, + "args": [], + "argline": "", + "sig": "", + "return": { + "type": "void", + "comment": null + }, + "description": "

Set the error message to a special value for memory allocation failure.

\n", + "comments": "

The normal giterr_set_str() function attempts to strdup() the string that is passed in. This is not a good idea when the error in question is a memory allocation failure. That circumstance has a special setter function that sets the error string to a known and statically allocated internal value.

\n", + "group": "giterr" + }, + "git_filter_list_load": { + "type": "function", + "file": "filter.h", + "line": 90, + "lineto": 96, + "args": [ + { + "name": "filters", + "type": "git_filter_list **", + "comment": "Output newly created git_filter_list (or NULL)" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository object that contains `path`" + }, + { + "name": "blob", + "type": "git_blob *", + "comment": "The blob to which the filter will be applied (if known)" + }, + { + "name": "path", + "type": "const char *", + "comment": "Relative path of the file to be filtered" + }, + { + "name": "mode", + "type": "git_filter_mode_t", + "comment": "Filtering direction (WT->ODB or ODB->WT)" + }, + { + "name": "flags", + "type": "uint32_t", + "comment": "Combination of `git_filter_flag_t` flags" + } + ], + "argline": "git_filter_list **filters, git_repository *repo, git_blob *blob, const char *path, git_filter_mode_t mode, uint32_t flags", + "sig": "git_filter_list **::git_repository *::git_blob *::const char *::git_filter_mode_t::uint32_t", + "return": { + "type": "int", + "comment": " 0 on success (which could still return NULL if no filters are\n needed for the requested file), \n<\n0 on error" + }, + "description": "

Load the filter list for a given path.

\n", + "comments": "

This will return 0 (success) but set the output git_filter_list to NULL if no filters are requested for the given file.

\n", + "group": "filter" + }, + "git_filter_list_contains": { + "type": "function", + "file": "filter.h", + "line": 110, + "lineto": 112, + "args": [ + { + "name": "filters", + "type": "git_filter_list *", + "comment": "A loaded git_filter_list (or NULL)" + }, + { + "name": "name", + "type": "const char *", + "comment": "The name of the filter to query" + } + ], + "argline": "git_filter_list *filters, const char *name", + "sig": "git_filter_list *::const char *", + "return": { + "type": "int", + "comment": " 1 if the filter is in the list, 0 otherwise" + }, + "description": "

Query the filter list to see if a given filter (by name) will run.\n The built-in filters "crlf" and "ident" can be queried, otherwise this\n is the name of the filter specified by the filter attribute.

\n", + "comments": "

This will return 0 if the given filter is not in the list, or 1 if the filter will be applied.

\n", + "group": "filter" + }, + "git_filter_list_apply_to_data": { + "type": "function", + "file": "filter.h", + "line": 134, + "lineto": 137, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "Buffer to store the result of the filtering" + }, + { + "name": "filters", + "type": "git_filter_list *", + "comment": "A loaded git_filter_list (or NULL)" + }, + { + "name": "in", + "type": "git_buf *", + "comment": "Buffer containing the data to filter" + } + ], + "argline": "git_buf *out, git_filter_list *filters, git_buf *in", + "sig": "git_buf *::git_filter_list *::git_buf *", + "return": { + "type": "int", + "comment": " 0 on success, an error code otherwise" + }, + "description": "

Apply filter list to a data buffer.

\n", + "comments": "

See git2/buffer.h for background on git_buf objects.

\n\n

If the in buffer holds data allocated by libgit2 (i.e. in->asize is not zero), then it will be overwritten when applying the filters. If not, then it will be left untouched.

\n\n

If there are no filters to apply (or filters is NULL), then the out buffer will reference the in buffer data (with asize set to zero) instead of allocating data. This keeps allocations to a minimum, but it means you have to be careful about freeing the in data since out may be pointing to it!

\n", + "group": "filter" + }, + "git_filter_list_apply_to_file": { + "type": "function", + "file": "filter.h", + "line": 148, + "lineto": 152, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "buffer into which to store the filtered file" + }, + { + "name": "filters", + "type": "git_filter_list *", + "comment": "the list of filters to apply" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to perform the filtering" + }, + { + "name": "path", + "type": "const char *", + "comment": "the path of the file to filter, a relative path will be\n taken as relative to the workdir" + } + ], + "argline": "git_buf *out, git_filter_list *filters, git_repository *repo, const char *path", + "sig": "git_buf *::git_filter_list *::git_repository *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Apply a filter list to the contents of a file on disk

\n", + "comments": "", + "group": "filter" + }, + "git_filter_list_apply_to_blob": { + "type": "function", + "file": "filter.h", + "line": 161, + "lineto": 164, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "buffer into which to store the filtered file" + }, + { + "name": "filters", + "type": "git_filter_list *", + "comment": "the list of filters to apply" + }, + { + "name": "blob", + "type": "git_blob *", + "comment": "the blob to filter" + } + ], + "argline": "git_buf *out, git_filter_list *filters, git_blob *blob", + "sig": "git_buf *::git_filter_list *::git_blob *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Apply a filter list to the contents of a blob

\n", + "comments": "", + "group": "filter" + }, + "git_filter_list_stream_data": { + "type": "function", + "file": "filter.h", + "line": 173, + "lineto": 176, + "args": [ + { + "name": "filters", + "type": "git_filter_list *", + "comment": "the list of filters to apply" + }, + { + "name": "data", + "type": "git_buf *", + "comment": "the buffer to filter" + }, + { + "name": "target", + "type": "git_writestream *", + "comment": "the stream into which the data will be written" + } + ], + "argline": "git_filter_list *filters, git_buf *data, git_writestream *target", + "sig": "git_filter_list *::git_buf *::git_writestream *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Apply a filter list to an arbitrary buffer as a stream

\n", + "comments": "", + "group": "filter" + }, + "git_filter_list_stream_file": { + "type": "function", + "file": "filter.h", + "line": 187, + "lineto": 191, + "args": [ + { + "name": "filters", + "type": "git_filter_list *", + "comment": "the list of filters to apply" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to perform the filtering" + }, + { + "name": "path", + "type": "const char *", + "comment": "the path of the file to filter, a relative path will be\n taken as relative to the workdir" + }, + { + "name": "target", + "type": "git_writestream *", + "comment": "the stream into which the data will be written" + } + ], + "argline": "git_filter_list *filters, git_repository *repo, const char *path, git_writestream *target", + "sig": "git_filter_list *::git_repository *::const char *::git_writestream *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Apply a filter list to a file as a stream

\n", + "comments": "", + "group": "filter" + }, + "git_filter_list_stream_blob": { + "type": "function", + "file": "filter.h", + "line": 200, + "lineto": 203, + "args": [ + { + "name": "filters", + "type": "git_filter_list *", + "comment": "the list of filters to apply" + }, + { + "name": "blob", + "type": "git_blob *", + "comment": "the blob to filter" + }, + { + "name": "target", + "type": "git_writestream *", + "comment": "the stream into which the data will be written" + } + ], + "argline": "git_filter_list *filters, git_blob *blob, git_writestream *target", + "sig": "git_filter_list *::git_blob *::git_writestream *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Apply a filter list to a blob as a stream

\n", + "comments": "", + "group": "filter" + }, + "git_filter_list_free": { + "type": "function", + "file": "filter.h", + "line": 210, + "lineto": 210, + "args": [ + { + "name": "filters", + "type": "git_filter_list *", + "comment": "A git_filter_list created by `git_filter_list_load`" + } + ], + "argline": "git_filter_list *filters", + "sig": "git_filter_list *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a git_filter_list

\n", + "comments": "", + "group": "filter" + }, + "git_libgit2_init": { + "type": "function", + "file": "global.h", + "line": 26, + "lineto": 26, + "args": [], + "argline": "", + "sig": "", + "return": { + "type": "int", + "comment": " the number of initializations of the library, or an error code." + }, + "description": "

Init the global state

\n", + "comments": "

This function must be called before any other libgit2 function in order to set up global state and threading.

\n\n

This function may be called multiple times - it will return the number of times the initialization has been called (including this one) that have not subsequently been shutdown.

\n", + "group": "libgit2", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_libgit2_init-8" + ], + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_libgit2_init-10" + ], + "describe.c": [ + "ex/HEAD/describe.html#git_libgit2_init-4" + ], + "diff.c": [ + "ex/HEAD/diff.html#git_libgit2_init-13" + ], + "general.c": [ + "ex/HEAD/general.html#git_libgit2_init-28" + ], + "init.c": [ + "ex/HEAD/init.html#git_libgit2_init-2" + ], + "log.c": [ + "ex/HEAD/log.html#git_libgit2_init-30" + ], + "network/git2.c": [ + "ex/HEAD/network/git2.html#git_libgit2_init-3" + ], + "remote.c": [ + "ex/HEAD/remote.html#git_libgit2_init-2" + ], + "rev-parse.c": [ + "ex/HEAD/rev-parse.html#git_libgit2_init-1" + ], + "status.c": [ + "ex/HEAD/status.html#git_libgit2_init-1" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_libgit2_init-3" + ] + } + }, + "git_libgit2_shutdown": { + "type": "function", + "file": "global.h", + "line": 39, + "lineto": 39, + "args": [], + "argline": "", + "sig": "", + "return": { + "type": "int", + "comment": " the number of remaining initializations of the library, or an\n error code." + }, + "description": "

Shutdown the global state

\n", + "comments": "

Clean up the global state and threading context after calling it as many times as git_libgit2_init() was called - it will return the number of remaining initializations that have not been shutdown (after this one).

\n", + "group": "libgit2", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_libgit2_shutdown-9" + ], + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_libgit2_shutdown-11" + ], + "describe.c": [ + "ex/HEAD/describe.html#git_libgit2_shutdown-5" + ], + "diff.c": [ + "ex/HEAD/diff.html#git_libgit2_shutdown-14" + ], + "init.c": [ + "ex/HEAD/init.html#git_libgit2_shutdown-3" + ], + "log.c": [ + "ex/HEAD/log.html#git_libgit2_shutdown-31" + ], + "network/git2.c": [ + "ex/HEAD/network/git2.html#git_libgit2_shutdown-4" + ], + "remote.c": [ + "ex/HEAD/remote.html#git_libgit2_shutdown-3" + ], + "rev-parse.c": [ + "ex/HEAD/rev-parse.html#git_libgit2_shutdown-2" + ], + "status.c": [ + "ex/HEAD/status.html#git_libgit2_shutdown-2" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_libgit2_shutdown-4" + ] + } + }, + "git_graph_ahead_behind": { + "type": "function", + "file": "graph.h", + "line": 37, + "lineto": 37, + "args": [ + { + "name": "ahead", + "type": "size_t *", + "comment": "number of unique from commits in `upstream`" + }, + { + "name": "behind", + "type": "size_t *", + "comment": "number of unique from commits in `local`" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository where the commits exist" + }, + { + "name": "local", + "type": "const git_oid *", + "comment": "the commit for local" + }, + { + "name": "upstream", + "type": "const git_oid *", + "comment": "the commit for upstream" + } + ], + "argline": "size_t *ahead, size_t *behind, git_repository *repo, const git_oid *local, const git_oid *upstream", + "sig": "size_t *::size_t *::git_repository *::const git_oid *::const git_oid *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Count the number of unique commits between two commit objects

\n", + "comments": "

There is no need for branches containing the commits to have any upstream relationship, but it helps to think of one as a branch and the other as its upstream, the ahead and behind values will be what git would report for the branches.

\n", + "group": "graph" + }, + "git_graph_descendant_of": { + "type": "function", + "file": "graph.h", + "line": 48, + "lineto": 51, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": null + }, + { + "name": "commit", + "type": "const git_oid *", + "comment": "a previously loaded commit." + }, + { + "name": "ancestor", + "type": "const git_oid *", + "comment": "a potential ancestor commit." + } + ], + "argline": "git_repository *repo, const git_oid *commit, const git_oid *ancestor", + "sig": "git_repository *::const git_oid *::const git_oid *", + "return": { + "type": "int", + "comment": " 1 if the given commit is a descendant of the potential ancestor,\n 0 if not, error code otherwise." + }, + "description": "

Determine if a commit is the descendant of another commit.

\n", + "comments": "", + "group": "graph" + }, + "git_ignore_add_rule": { + "type": "function", + "file": "ignore.h", + "line": 37, + "lineto": 39, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository to add ignore rules to." + }, + { + "name": "rules", + "type": "const char *", + "comment": "Text of rules, a la the contents of a .gitignore file.\n It is okay to have multiple rules in the text; if so,\n each rule should be terminated with a newline." + } + ], + "argline": "git_repository *repo, const char *rules", + "sig": "git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success" + }, + "description": "

Add ignore rules for a repository.

\n", + "comments": "

Excludesfile rules (i.e. .gitignore rules) are generally read from .gitignore files in the repository tree or from a shared system file only if a "core.excludesfile" config value is set. The library also keeps a set of per-repository internal ignores that can be configured in-memory and will not persist. This function allows you to add to that internal rules list.

\n\n

Example usage:

\n\n
 error = git_ignore_add_rule(myrepo, "*.c/ with space");\n
\n\n

This would add three rules to the ignores.

\n", + "group": "ignore" + }, + "git_ignore_clear_internal_rules": { + "type": "function", + "file": "ignore.h", + "line": 52, + "lineto": 53, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository to remove ignore rules from." + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": " 0 on success" + }, + "description": "

Clear ignore rules that were explicitly added.

\n", + "comments": "

Resets to the default internal ignore rules. This will not turn off rules in .gitignore files that actually exist in the filesystem.

\n\n

The default internal ignores ignore ".", ".." and ".git" entries.

\n", + "group": "ignore" + }, + "git_ignore_path_is_ignored": { + "type": "function", + "file": "ignore.h", + "line": 71, + "lineto": 74, + "args": [ + { + "name": "ignored", + "type": "int *", + "comment": "boolean returning 0 if the file is not ignored, 1 if it is" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "a repository object" + }, + { + "name": "path", + "type": "const char *", + "comment": "the file to check ignores for, relative to the repo's workdir." + } + ], + "argline": "int *ignored, git_repository *repo, const char *path", + "sig": "int *::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 if ignore rules could be processed for the file (regardless\n of whether it exists or not), or an error \n<\n 0 if they could not." + }, + "description": "

Test if the ignore rules apply to a given path.

\n", + "comments": "

This function checks the ignore rules to see if they would apply to the given file. This indicates if the file would be ignored regardless of whether the file is already in the index or committed to the repository.

\n\n

One way to think of this is if you were to do "git add ." on the directory containing the file, would it be added or not?

\n", + "group": "ignore" + }, + "git_index_open": { + "type": "function", + "file": "index.h", + "line": 203, + "lineto": 203, + "args": [ + { + "name": "out", + "type": "git_index **", + "comment": "the pointer for the new index" + }, + { + "name": "index_path", + "type": "const char *", + "comment": "the path to the index file in disk" + } + ], + "argline": "git_index **out, const char *index_path", + "sig": "git_index **::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a new bare Git index object as a memory representation\n of the Git index file in 'index_path', without a repository\n to back it.

\n", + "comments": "

Since there is no ODB or working directory behind this index, any Index methods which rely on these (e.g. index_add_bypath) will fail with the GIT_ERROR error code.

\n\n

If you need to access the index of an actual repository, use the git_repository_index wrapper.

\n\n

The index must be freed once it's no longer in use.

\n", + "group": "index" + }, + "git_index_new": { + "type": "function", + "file": "index.h", + "line": 216, + "lineto": 216, + "args": [ + { + "name": "out", + "type": "git_index **", + "comment": "the pointer for the new index" + } + ], + "argline": "git_index **out", + "sig": "git_index **", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an in-memory index object.

\n", + "comments": "

This index object cannot be read/written to the filesystem, but may be used to perform in-memory index operations.

\n\n

The index must be freed once it's no longer in use.

\n", + "group": "index" + }, + "git_index_free": { + "type": "function", + "file": "index.h", + "line": 223, + "lineto": 223, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + } + ], + "argline": "git_index *index", + "sig": "git_index *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free an existing index object.

\n", + "comments": "", + "group": "index", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_index_free-29" + ], + "init.c": [ + "ex/HEAD/init.html#git_index_free-4" + ] + } + }, + "git_index_owner": { + "type": "function", + "file": "index.h", + "line": 231, + "lineto": 231, + "args": [ + { + "name": "index", + "type": "const git_index *", + "comment": "The index" + } + ], + "argline": "const git_index *index", + "sig": "const git_index *", + "return": { + "type": "git_repository *", + "comment": " A pointer to the repository" + }, + "description": "

Get the repository this index relates to

\n", + "comments": "", + "group": "index" + }, + "git_index_caps": { + "type": "function", + "file": "index.h", + "line": 239, + "lineto": 239, + "args": [ + { + "name": "index", + "type": "const git_index *", + "comment": "An existing index object" + } + ], + "argline": "const git_index *index", + "sig": "const git_index *", + "return": { + "type": "int", + "comment": " A combination of GIT_INDEXCAP values" + }, + "description": "

Read index capabilities flags.

\n", + "comments": "", + "group": "index" + }, + "git_index_set_caps": { + "type": "function", + "file": "index.h", + "line": 252, + "lineto": 252, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "An existing index object" + }, + { + "name": "caps", + "type": "int", + "comment": "A combination of GIT_INDEXCAP values" + } + ], + "argline": "git_index *index, int caps", + "sig": "git_index *::int", + "return": { + "type": "int", + "comment": " 0 on success, -1 on failure" + }, + "description": "

Set index capabilities flags.

\n", + "comments": "

If you pass GIT_INDEXCAP_FROM_OWNER for the caps, then the capabilities will be read from the config of the owner object, looking at core.ignorecase, core.filemode, core.symlinks.

\n", + "group": "index" + }, + "git_index_read": { + "type": "function", + "file": "index.h", + "line": 271, + "lineto": 271, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "force", + "type": "int", + "comment": "if true, always reload, vs. only read if file has changed" + } + ], + "argline": "git_index *index, int force", + "sig": "git_index *::int", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Update the contents of an existing index object in memory by reading\n from the hard disk.

\n", + "comments": "

If force is true, this performs a "hard" read that discards in-memory changes and always reloads the on-disk index data. If there is no on-disk version, the index will be cleared.

\n\n

If force is false, this does a "soft" read that reloads the index data from disk only if it has changed since the last time it was loaded. Purely in-memory index data will be untouched. Be aware: if there are changes on disk, unwritten in-memory changes are discarded.

\n", + "group": "index" + }, + "git_index_write": { + "type": "function", + "file": "index.h", + "line": 280, + "lineto": 280, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + } + ], + "argline": "git_index *index", + "sig": "git_index *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Write an existing index object from memory back to disk\n using an atomic file lock.

\n", + "comments": "", + "group": "index" + }, + "git_index_path": { + "type": "function", + "file": "index.h", + "line": 288, + "lineto": 288, + "args": [ + { + "name": "index", + "type": "const git_index *", + "comment": "an existing index object" + } + ], + "argline": "const git_index *index", + "sig": "const git_index *", + "return": { + "type": "const char *", + "comment": " path to index file or NULL for in-memory index" + }, + "description": "

Get the full path to the index file on disk.

\n", + "comments": "", + "group": "index" + }, + "git_index_checksum": { + "type": "function", + "file": "index.h", + "line": 300, + "lineto": 300, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + } + ], + "argline": "git_index *index", + "sig": "git_index *", + "return": { + "type": "const git_oid *", + "comment": " a pointer to the checksum of the index" + }, + "description": "

Get the checksum of the index

\n", + "comments": "

This checksum is the SHA-1 hash over the index file (except the last 20 bytes which are the checksum itself). In cases where the index does not exist on-disk, it will be zeroed out.

\n", + "group": "index" + }, + "git_index_read_tree": { + "type": "function", + "file": "index.h", + "line": 311, + "lineto": 311, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "tree", + "type": "const git_tree *", + "comment": "tree to read" + } + ], + "argline": "git_index *index, const git_tree *tree", + "sig": "git_index *::const git_tree *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Read a tree into the index file with stats

\n", + "comments": "

The current index contents will be replaced by the specified tree.

\n", + "group": "index" + }, + "git_index_write_tree": { + "type": "function", + "file": "index.h", + "line": 332, + "lineto": 332, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "Pointer where to store the OID of the written tree" + }, + { + "name": "index", + "type": "git_index *", + "comment": "Index to write" + } + ], + "argline": "git_oid *out, git_index *index", + "sig": "git_oid *::git_index *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EUNMERGED when the index is not clean\n or an error code" + }, + "description": "

Write the index as a tree

\n", + "comments": "

This method will scan the index and write a representation of its current state back to disk; it recursively creates tree objects for each of the subtrees stored in the index, but only returns the OID of the root tree. This is the OID that can be used e.g. to create a commit.

\n\n

The index instance cannot be bare, and needs to be associated to an existing repository.

\n\n

The index must not contain any file in conflict.

\n", + "group": "index", + "examples": { + "init.c": [ + "ex/HEAD/init.html#git_index_write_tree-5" + ] + } + }, + "git_index_write_tree_to": { + "type": "function", + "file": "index.h", + "line": 349, + "lineto": 349, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "Pointer where to store OID of the the written tree" + }, + { + "name": "index", + "type": "git_index *", + "comment": "Index to write" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to write the tree" + } + ], + "argline": "git_oid *out, git_index *index, git_repository *repo", + "sig": "git_oid *::git_index *::git_repository *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EUNMERGED when the index is not clean\n or an error code" + }, + "description": "

Write the index as a tree to the given repository

\n", + "comments": "

This method will do the same as git_index_write_tree, but letting the user choose the repository where the tree will be written.

\n\n

The index must not contain any file in conflict.

\n", + "group": "index" + }, + "git_index_entrycount": { + "type": "function", + "file": "index.h", + "line": 368, + "lineto": 368, + "args": [ + { + "name": "index", + "type": "const git_index *", + "comment": "an existing index object" + } + ], + "argline": "const git_index *index", + "sig": "const git_index *", + "return": { + "type": "size_t", + "comment": " integer of count of current entries" + }, + "description": "

Get the count of entries currently in the index

\n", + "comments": "", + "group": "index", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_index_entrycount-30" + ] + } + }, + "git_index_clear": { + "type": "function", + "file": "index.h", + "line": 379, + "lineto": 379, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + } + ], + "argline": "git_index *index", + "sig": "git_index *", + "return": { + "type": "int", + "comment": " 0 on success, error code \n<\n 0 on failure" + }, + "description": "

Clear the contents (all the entries) of an index object.

\n", + "comments": "

This clears the index object in memory; changes must be explicitly written to disk for them to take effect persistently.

\n", + "group": "index" + }, + "git_index_get_byindex": { + "type": "function", + "file": "index.h", + "line": 392, + "lineto": 393, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "n", + "type": "size_t", + "comment": "the position of the entry" + } + ], + "argline": "git_index *index, size_t n", + "sig": "git_index *::size_t", + "return": { + "type": "const git_index_entry *", + "comment": " a pointer to the entry; NULL if out of bounds" + }, + "description": "

Get a pointer to one of the entries in the index

\n", + "comments": "

The entry is not modifiable and should not be freed. Because the git_index_entry struct is a publicly defined struct, you should be able to make your own permanent copy of the data if necessary.

\n", + "group": "index", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_index_get_byindex-31" + ] + } + }, + "git_index_get_bypath": { + "type": "function", + "file": "index.h", + "line": 407, + "lineto": 408, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "path", + "type": "const char *", + "comment": "path to search" + }, + { + "name": "stage", + "type": "int", + "comment": "stage to search" + } + ], + "argline": "git_index *index, const char *path, int stage", + "sig": "git_index *::const char *::int", + "return": { + "type": "const git_index_entry *", + "comment": " a pointer to the entry; NULL if it was not found" + }, + "description": "

Get a pointer to one of the entries in the index

\n", + "comments": "

The entry is not modifiable and should not be freed. Because the git_index_entry struct is a publicly defined struct, you should be able to make your own permanent copy of the data if necessary.

\n", + "group": "index" + }, + "git_index_remove": { + "type": "function", + "file": "index.h", + "line": 418, + "lineto": 418, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "path", + "type": "const char *", + "comment": "path to search" + }, + { + "name": "stage", + "type": "int", + "comment": "stage to search" + } + ], + "argline": "git_index *index, const char *path, int stage", + "sig": "git_index *::const char *::int", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Remove an entry from the index

\n", + "comments": "", + "group": "index" + }, + "git_index_remove_directory": { + "type": "function", + "file": "index.h", + "line": 428, + "lineto": 429, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "dir", + "type": "const char *", + "comment": "container directory path" + }, + { + "name": "stage", + "type": "int", + "comment": "stage to search" + } + ], + "argline": "git_index *index, const char *dir, int stage", + "sig": "git_index *::const char *::int", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Remove all entries from the index under a given directory

\n", + "comments": "", + "group": "index" + }, + "git_index_add": { + "type": "function", + "file": "index.h", + "line": 445, + "lineto": 445, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "source_entry", + "type": "const git_index_entry *", + "comment": "new entry object" + } + ], + "argline": "git_index *index, const git_index_entry *source_entry", + "sig": "git_index *::const git_index_entry *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Add or update an index entry from an in-memory struct

\n", + "comments": "

If a previous index entry exists that has the same path and stage as the given 'source_entry', it will be replaced. Otherwise, the 'source_entry' will be added.

\n\n

A full copy (including the 'path' string) of the given 'source_entry' will be inserted on the index.

\n", + "group": "index" + }, + "git_index_entry_stage": { + "type": "function", + "file": "index.h", + "line": 457, + "lineto": 457, + "args": [ + { + "name": "entry", + "type": "const git_index_entry *", + "comment": "The entry" + } + ], + "argline": "const git_index_entry *entry", + "sig": "const git_index_entry *", + "return": { + "type": "int", + "comment": " the stage number" + }, + "description": "

Return the stage number from a git index entry

\n", + "comments": "

This entry is calculated from the entry's flag attribute like this:

\n\n
(entry->flags & GIT_IDXENTRY_STAGEMASK) >> GIT_IDXENTRY_STAGESHIFT\n
\n", + "group": "index" + }, + "git_index_entry_is_conflict": { + "type": "function", + "file": "index.h", + "line": 466, + "lineto": 466, + "args": [ + { + "name": "entry", + "type": "const git_index_entry *", + "comment": "The entry" + } + ], + "argline": "const git_index_entry *entry", + "sig": "const git_index_entry *", + "return": { + "type": "int", + "comment": " 1 if the entry is a conflict entry, 0 otherwise" + }, + "description": "

Return whether the given index entry is a conflict (has a high stage\n entry). This is simply shorthand for git_index_entry_stage > 0.

\n", + "comments": "", + "group": "index" + }, + "git_index_add_bypath": { + "type": "function", + "file": "index.h", + "line": 497, + "lineto": 497, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "path", + "type": "const char *", + "comment": "filename to add" + } + ], + "argline": "git_index *index, const char *path", + "sig": "git_index *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Add or update an index entry from a file on disk

\n", + "comments": "

The file path must be relative to the repository's working folder and must be readable.

\n\n

This method will fail in bare index instances.

\n\n

This forces the file to be added to the index, not looking at gitignore rules. Those rules can be evaluated through the git_status APIs (in status.h) before calling this.

\n\n

If this file currently is the result of a merge conflict, this file will no longer be marked as conflicting. The data about the conflict will be moved to the "resolve undo" (REUC) section.

\n", + "group": "index" + }, + "git_index_add_frombuffer": { + "type": "function", + "file": "index.h", + "line": 526, + "lineto": 529, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "entry", + "type": "const git_index_entry *", + "comment": "filename to add" + }, + { + "name": "buffer", + "type": "const void *", + "comment": "data to be written into the blob" + }, + { + "name": "len", + "type": "size_t", + "comment": "length of the data" + } + ], + "argline": "git_index *index, const git_index_entry *entry, const void *buffer, size_t len", + "sig": "git_index *::const git_index_entry *::const void *::size_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Add or update an index entry from a buffer in memory

\n", + "comments": "

This method will create a blob in the repository that owns the index and then add the index entry to the index. The path of the entry represents the position of the blob relative to the repository's root folder.

\n\n

If a previous index entry exists that has the same path as the given 'entry', it will be replaced. Otherwise, the 'entry' will be added. The id and the file_size of the 'entry' are updated with the real value of the blob.

\n\n

This forces the file to be added to the index, not looking at gitignore rules. Those rules can be evaluated through the git_status APIs (in status.h) before calling this.

\n\n

If this file currently is the result of a merge conflict, this file will no longer be marked as conflicting. The data about the conflict will be moved to the "resolve undo" (REUC) section.

\n", + "group": "index" + }, + "git_index_remove_bypath": { + "type": "function", + "file": "index.h", + "line": 545, + "lineto": 545, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "path", + "type": "const char *", + "comment": "filename to remove" + } + ], + "argline": "git_index *index, const char *path", + "sig": "git_index *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Remove an index entry corresponding to a file on disk

\n", + "comments": "

The file path must be relative to the repository's working folder. It may exist.

\n\n

If this file currently is the result of a merge conflict, this file will no longer be marked as conflicting. The data about the conflict will be moved to the "resolve undo" (REUC) section.

\n", + "group": "index" + }, + "git_index_add_all": { + "type": "function", + "file": "index.h", + "line": 592, + "lineto": 597, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "pathspec", + "type": "const git_strarray *", + "comment": "array of path patterns" + }, + { + "name": "flags", + "type": "unsigned int", + "comment": "combination of git_index_add_option_t flags" + }, + { + "name": "callback", + "type": "git_index_matched_path_cb", + "comment": "notification callback for each added/updated path (also\n gets index of matching pathspec entry); can be NULL;\n return 0 to add, >0 to skip, \n<\n0 to abort scan." + }, + { + "name": "payload", + "type": "void *", + "comment": "payload passed through to callback function" + } + ], + "argline": "git_index *index, const git_strarray *pathspec, unsigned int flags, git_index_matched_path_cb callback, void *payload", + "sig": "git_index *::const git_strarray *::unsigned int::git_index_matched_path_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, negative callback return value, or error code" + }, + "description": "

Add or update index entries matching files in the working directory.

\n", + "comments": "

This method will fail in bare index instances.

\n\n

The pathspec is a list of file names or shell glob patterns that will matched against files in the repository's working directory. Each file that matches will be added to the index (either updating an existing entry or adding a new entry). You can disable glob expansion and force exact matching with the GIT_INDEX_ADD_DISABLE_PATHSPEC_MATCH flag.

\n\n

Files that are ignored will be skipped (unlike git_index_add_bypath). If a file is already tracked in the index, then it will be updated even if it is ignored. Pass the GIT_INDEX_ADD_FORCE flag to skip the checking of ignore rules.

\n\n

To emulate git add -A and generate an error if the pathspec contains the exact path of an ignored file (when not using FORCE), add the GIT_INDEX_ADD_CHECK_PATHSPEC flag. This checks that each entry in the pathspec that is an exact match to a filename on disk is either not ignored or already in the index. If this check fails, the function will return GIT_EINVALIDSPEC.

\n\n

To emulate git add -A with the "dry-run" option, just use a callback function that always returns a positive value. See below for details.

\n\n

If any files are currently the result of a merge conflict, those files will no longer be marked as conflicting. The data about the conflicts will be moved to the "resolve undo" (REUC) section.

\n\n

If you provide a callback function, it will be invoked on each matching item in the working directory immediately before it is added to / updated in the index. Returning zero will add the item to the index, greater than zero will skip the item, and less than zero will abort the scan and return that value to the caller.

\n", + "group": "index" + }, + "git_index_remove_all": { + "type": "function", + "file": "index.h", + "line": 614, + "lineto": 618, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "An existing index object" + }, + { + "name": "pathspec", + "type": "const git_strarray *", + "comment": "array of path patterns" + }, + { + "name": "callback", + "type": "git_index_matched_path_cb", + "comment": "notification callback for each removed path (also\n gets index of matching pathspec entry); can be NULL;\n return 0 to add, >0 to skip, \n<\n0 to abort scan." + }, + { + "name": "payload", + "type": "void *", + "comment": "payload passed through to callback function" + } + ], + "argline": "git_index *index, const git_strarray *pathspec, git_index_matched_path_cb callback, void *payload", + "sig": "git_index *::const git_strarray *::git_index_matched_path_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, negative callback return value, or error code" + }, + "description": "

Remove all matching index entries.

\n", + "comments": "

If you provide a callback function, it will be invoked on each matching item in the index immediately before it is removed. Return 0 to remove the item, > 0 to skip the item, and < 0 to abort the scan.

\n", + "group": "index" + }, + "git_index_update_all": { + "type": "function", + "file": "index.h", + "line": 643, + "lineto": 647, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "An existing index object" + }, + { + "name": "pathspec", + "type": "const git_strarray *", + "comment": "array of path patterns" + }, + { + "name": "callback", + "type": "git_index_matched_path_cb", + "comment": "notification callback for each updated path (also\n gets index of matching pathspec entry); can be NULL;\n return 0 to add, >0 to skip, \n<\n0 to abort scan." + }, + { + "name": "payload", + "type": "void *", + "comment": "payload passed through to callback function" + } + ], + "argline": "git_index *index, const git_strarray *pathspec, git_index_matched_path_cb callback, void *payload", + "sig": "git_index *::const git_strarray *::git_index_matched_path_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, negative callback return value, or error code" + }, + "description": "

Update all index entries to match the working directory

\n", + "comments": "

This method will fail in bare index instances.

\n\n

This scans the existing index entries and synchronizes them with the working directory, deleting them if the corresponding working directory file no longer exists otherwise updating the information (including adding the latest version of file to the ODB if needed).

\n\n

If you provide a callback function, it will be invoked on each matching item in the index immediately before it is updated (either refreshed or removed depending on working directory state). Return 0 to proceed with updating the item, > 0 to skip the item, and < 0 to abort the scan.

\n", + "group": "index" + }, + "git_index_find": { + "type": "function", + "file": "index.h", + "line": 658, + "lineto": 658, + "args": [ + { + "name": "at_pos", + "type": "size_t *", + "comment": "the address to which the position of the index entry is written (optional)" + }, + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "path", + "type": "const char *", + "comment": "path to search" + } + ], + "argline": "size_t *at_pos, git_index *index, const char *path", + "sig": "size_t *::git_index *::const char *", + "return": { + "type": "int", + "comment": " a zero-based position in the index if found; GIT_ENOTFOUND otherwise" + }, + "description": "

Find the first position of any entries which point to given\n path in the Git index.

\n", + "comments": "", + "group": "index" + }, + "git_index_find_prefix": { + "type": "function", + "file": "index.h", + "line": 669, + "lineto": 669, + "args": [ + { + "name": "at_pos", + "type": "size_t *", + "comment": "the address to which the position of the index entry is written (optional)" + }, + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "prefix", + "type": "const char *", + "comment": "the prefix to search for" + } + ], + "argline": "size_t *at_pos, git_index *index, const char *prefix", + "sig": "size_t *::git_index *::const char *", + "return": { + "type": "int", + "comment": " 0 with valid value in at_pos; an error code otherwise" + }, + "description": "

Find the first position of any entries matching a prefix. To find the first position\n of a path inside a given folder, suffix the prefix with a '/'.

\n", + "comments": "", + "group": "index" + }, + "git_index_conflict_add": { + "type": "function", + "file": "index.h", + "line": 694, + "lineto": 698, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "ancestor_entry", + "type": "const git_index_entry *", + "comment": "the entry data for the ancestor of the conflict" + }, + { + "name": "our_entry", + "type": "const git_index_entry *", + "comment": "the entry data for our side of the merge conflict" + }, + { + "name": "their_entry", + "type": "const git_index_entry *", + "comment": "the entry data for their side of the merge conflict" + } + ], + "argline": "git_index *index, const git_index_entry *ancestor_entry, const git_index_entry *our_entry, const git_index_entry *their_entry", + "sig": "git_index *::const git_index_entry *::const git_index_entry *::const git_index_entry *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Add or update index entries to represent a conflict. Any staged\n entries that exist at the given paths will be removed.

\n", + "comments": "

The entries are the entries from the tree included in the merge. Any entry may be null to indicate that that file was not present in the trees during the merge. For example, ancestor_entry may be NULL to indicate that a file was added in both branches and must be resolved.

\n", + "group": "index" + }, + "git_index_conflict_get": { + "type": "function", + "file": "index.h", + "line": 714, + "lineto": 719, + "args": [ + { + "name": "ancestor_out", + "type": "const git_index_entry **", + "comment": "Pointer to store the ancestor entry" + }, + { + "name": "our_out", + "type": "const git_index_entry **", + "comment": "Pointer to store the our entry" + }, + { + "name": "their_out", + "type": "const git_index_entry **", + "comment": "Pointer to store the their entry" + }, + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "path", + "type": "const char *", + "comment": "path to search" + } + ], + "argline": "const git_index_entry **ancestor_out, const git_index_entry **our_out, const git_index_entry **their_out, git_index *index, const char *path", + "sig": "const git_index_entry **::const git_index_entry **::const git_index_entry **::git_index *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the index entries that represent a conflict of a single file.

\n", + "comments": "

The entries are not modifiable and should not be freed. Because the git_index_entry struct is a publicly defined struct, you should be able to make your own permanent copy of the data if necessary.

\n", + "group": "index" + }, + "git_index_conflict_remove": { + "type": "function", + "file": "index.h", + "line": 728, + "lineto": 728, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + }, + { + "name": "path", + "type": "const char *", + "comment": "path to remove conflicts for" + } + ], + "argline": "git_index *index, const char *path", + "sig": "git_index *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Removes the index entries that represent a conflict of a single file.

\n", + "comments": "", + "group": "index" + }, + "git_index_conflict_cleanup": { + "type": "function", + "file": "index.h", + "line": 736, + "lineto": 736, + "args": [ + { + "name": "index", + "type": "git_index *", + "comment": "an existing index object" + } + ], + "argline": "git_index *index", + "sig": "git_index *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Remove all conflicts in the index (entries with a stage greater than 0).

\n", + "comments": "", + "group": "index" + }, + "git_index_has_conflicts": { + "type": "function", + "file": "index.h", + "line": 743, + "lineto": 743, + "args": [ + { + "name": "index", + "type": "const git_index *", + "comment": null + } + ], + "argline": "const git_index *index", + "sig": "const git_index *", + "return": { + "type": "int", + "comment": " 1 if at least one conflict is found, 0 otherwise." + }, + "description": "

Determine if the index contains entries representing file conflicts.

\n", + "comments": "", + "group": "index" + }, + "git_index_conflict_iterator_new": { + "type": "function", + "file": "index.h", + "line": 754, + "lineto": 756, + "args": [ + { + "name": "iterator_out", + "type": "git_index_conflict_iterator **", + "comment": "The newly created conflict iterator" + }, + { + "name": "index", + "type": "git_index *", + "comment": "The index to scan" + } + ], + "argline": "git_index_conflict_iterator **iterator_out, git_index *index", + "sig": "git_index_conflict_iterator **::git_index *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an iterator for the conflicts in the index.

\n", + "comments": "

The index must not be modified while iterating; the results are undefined.

\n", + "group": "index" + }, + "git_index_conflict_next": { + "type": "function", + "file": "index.h", + "line": 768, + "lineto": 772, + "args": [ + { + "name": "ancestor_out", + "type": "const git_index_entry **", + "comment": "Pointer to store the ancestor side of the conflict" + }, + { + "name": "our_out", + "type": "const git_index_entry **", + "comment": "Pointer to store our side of the conflict" + }, + { + "name": "their_out", + "type": "const git_index_entry **", + "comment": "Pointer to store their side of the conflict" + }, + { + "name": "iterator", + "type": "git_index_conflict_iterator *", + "comment": null + } + ], + "argline": "const git_index_entry **ancestor_out, const git_index_entry **our_out, const git_index_entry **their_out, git_index_conflict_iterator *iterator", + "sig": "const git_index_entry **::const git_index_entry **::const git_index_entry **::git_index_conflict_iterator *", + "return": { + "type": "int", + "comment": " 0 (no error), GIT_ITEROVER (iteration is done) or an error code\n (negative value)" + }, + "description": "

Returns the current conflict (ancestor, ours and theirs entry) and\n advance the iterator internally to the next value.

\n", + "comments": "", + "group": "index" + }, + "git_index_conflict_iterator_free": { + "type": "function", + "file": "index.h", + "line": 779, + "lineto": 780, + "args": [ + { + "name": "iterator", + "type": "git_index_conflict_iterator *", + "comment": "pointer to the iterator" + } + ], + "argline": "git_index_conflict_iterator *iterator", + "sig": "git_index_conflict_iterator *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Frees a git_index_conflict_iterator.

\n", + "comments": "", + "group": "index" + }, + "git_indexer_new": { + "type": "function", + "file": "indexer.h", + "line": 30, + "lineto": 36, + "args": [ + { + "name": "out", + "type": "git_indexer **", + "comment": "where to store the indexer instance" + }, + { + "name": "path", + "type": "const char *", + "comment": "to the directory where the packfile should be stored" + }, + { + "name": "mode", + "type": "unsigned int", + "comment": "permissions to use creating packfile or 0 for defaults" + }, + { + "name": "odb", + "type": "git_odb *", + "comment": "object database from which to read base objects when\n fixing thin packs. Pass NULL if no thin pack is expected (an error\n will be returned if there are bases missing)" + }, + { + "name": "progress_cb", + "type": "git_transfer_progress_cb", + "comment": "function to call with progress information" + }, + { + "name": "progress_cb_payload", + "type": "void *", + "comment": "payload for the progress callback" + } + ], + "argline": "git_indexer **out, const char *path, unsigned int mode, git_odb *odb, git_transfer_progress_cb progress_cb, void *progress_cb_payload", + "sig": "git_indexer **::const char *::unsigned int::git_odb *::git_transfer_progress_cb::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a new indexer instance

\n", + "comments": "", + "group": "indexer", + "examples": { + "network/index-pack.c": [ + "ex/HEAD/network/index-pack.html#git_indexer_new-1" + ] + } + }, + "git_indexer_append": { + "type": "function", + "file": "indexer.h", + "line": 46, + "lineto": 46, + "args": [ + { + "name": "idx", + "type": "git_indexer *", + "comment": "the indexer" + }, + { + "name": "data", + "type": "const void *", + "comment": "the data to add" + }, + { + "name": "size", + "type": "size_t", + "comment": "the size of the data in bytes" + }, + { + "name": "stats", + "type": "git_transfer_progress *", + "comment": "stat storage" + } + ], + "argline": "git_indexer *idx, const void *data, size_t size, git_transfer_progress *stats", + "sig": "git_indexer *::const void *::size_t::git_transfer_progress *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Add data to the indexer

\n", + "comments": "", + "group": "indexer", + "examples": { + "network/index-pack.c": [ + "ex/HEAD/network/index-pack.html#git_indexer_append-2" + ] + } + }, + "git_indexer_commit": { + "type": "function", + "file": "indexer.h", + "line": 55, + "lineto": 55, + "args": [ + { + "name": "idx", + "type": "git_indexer *", + "comment": "the indexer" + }, + { + "name": "stats", + "type": "git_transfer_progress *", + "comment": null + } + ], + "argline": "git_indexer *idx, git_transfer_progress *stats", + "sig": "git_indexer *::git_transfer_progress *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Finalize the pack and index

\n", + "comments": "

Resolve any pending deltas and write out the index file

\n", + "group": "indexer", + "examples": { + "network/index-pack.c": [ + "ex/HEAD/network/index-pack.html#git_indexer_commit-3" + ] + } + }, + "git_indexer_hash": { + "type": "function", + "file": "indexer.h", + "line": 65, + "lineto": 65, + "args": [ + { + "name": "idx", + "type": "const git_indexer *", + "comment": "the indexer instance" + } + ], + "argline": "const git_indexer *idx", + "sig": "const git_indexer *", + "return": { + "type": "const git_oid *", + "comment": null + }, + "description": "

Get the packfile's hash

\n", + "comments": "

A packfile's name is derived from the sorted hashing of all object names. This is only correct after the index has been finalized.

\n", + "group": "indexer", + "examples": { + "network/index-pack.c": [ + "ex/HEAD/network/index-pack.html#git_indexer_hash-4" + ] + } + }, + "git_indexer_free": { + "type": "function", + "file": "indexer.h", + "line": 72, + "lineto": 72, + "args": [ + { + "name": "idx", + "type": "git_indexer *", + "comment": "the indexer to free" + } + ], + "argline": "git_indexer *idx", + "sig": "git_indexer *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free the indexer and its resources

\n", + "comments": "", + "group": "indexer", + "examples": { + "network/index-pack.c": [ + "ex/HEAD/network/index-pack.html#git_indexer_free-5" + ] + } + }, + "git_merge_file_init_input": { + "type": "function", + "file": "merge.h", + "line": 60, + "lineto": 62, + "args": [ + { + "name": "opts", + "type": "git_merge_file_input *", + "comment": "the `git_merge_file_input` instance to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "the version of the struct; you should pass\n `GIT_MERGE_FILE_INPUT_VERSION` here." + } + ], + "argline": "git_merge_file_input *opts, unsigned int version", + "sig": "git_merge_file_input *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_merge_file_input with default values. Equivalent to\n creating an instance with GIT_MERGE_FILE_INPUT_INIT.

\n", + "comments": "", + "group": "merge" + }, + "git_merge_file_init_options": { + "type": "function", + "file": "merge.h", + "line": 208, + "lineto": 210, + "args": [ + { + "name": "opts", + "type": "git_merge_file_options *", + "comment": "the `git_merge_file_options` instance to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "the version of the struct; you should pass\n `GIT_MERGE_FILE_OPTIONS_VERSION` here." + } + ], + "argline": "git_merge_file_options *opts, unsigned int version", + "sig": "git_merge_file_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_merge_file_options with default values. Equivalent to\n creating an instance with GIT_MERGE_FILE_OPTIONS_INIT.

\n", + "comments": "", + "group": "merge" + }, + "git_merge_init_options": { + "type": "function", + "file": "merge.h", + "line": 304, + "lineto": 306, + "args": [ + { + "name": "opts", + "type": "git_merge_options *", + "comment": "the `git_merge_options` instance to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "the version of the struct; you should pass\n `GIT_MERGE_OPTIONS_VERSION` here." + } + ], + "argline": "git_merge_options *opts, unsigned int version", + "sig": "git_merge_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_merge_options with default values. Equivalent to\n creating an instance with GIT_MERGE_OPTIONS_INIT.

\n", + "comments": "", + "group": "merge" + }, + "git_merge_analysis": { + "type": "function", + "file": "merge.h", + "line": 375, + "lineto": 380, + "args": [ + { + "name": "analysis_out", + "type": "git_merge_analysis_t *", + "comment": "analysis enumeration that the result is written into" + }, + { + "name": "preference_out", + "type": "git_merge_preference_t *", + "comment": null + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to merge" + }, + { + "name": "their_heads", + "type": "const git_annotated_commit **", + "comment": "the heads to merge into" + }, + { + "name": "their_heads_len", + "type": "size_t", + "comment": "the number of heads to merge" + } + ], + "argline": "git_merge_analysis_t *analysis_out, git_merge_preference_t *preference_out, git_repository *repo, const git_annotated_commit **their_heads, size_t their_heads_len", + "sig": "git_merge_analysis_t *::git_merge_preference_t *::git_repository *::const git_annotated_commit **::size_t", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Analyzes the given branch(es) and determines the opportunities for\n merging them into the HEAD of the repository.

\n", + "comments": "", + "group": "merge" + }, + "git_merge_base": { + "type": "function", + "file": "merge.h", + "line": 391, + "lineto": 395, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "the OID of a merge base between 'one' and 'two'" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository where the commits exist" + }, + { + "name": "one", + "type": "const git_oid *", + "comment": "one of the commits" + }, + { + "name": "two", + "type": "const git_oid *", + "comment": "the other commit" + } + ], + "argline": "git_oid *out, git_repository *repo, const git_oid *one, const git_oid *two", + "sig": "git_oid *::git_repository *::const git_oid *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if not found or error code" + }, + "description": "

Find a merge base between two commits

\n", + "comments": "", + "group": "merge", + "examples": { + "log.c": [ + "ex/HEAD/log.html#git_merge_base-32" + ], + "rev-parse.c": [ + "ex/HEAD/rev-parse.html#git_merge_base-3" + ] + } + }, + "git_merge_bases": { + "type": "function", + "file": "merge.h", + "line": 406, + "lineto": 410, + "args": [ + { + "name": "out", + "type": "git_oidarray *", + "comment": "array in which to store the resulting ids" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository where the commits exist" + }, + { + "name": "one", + "type": "const git_oid *", + "comment": "one of the commits" + }, + { + "name": "two", + "type": "const git_oid *", + "comment": "the other commit" + } + ], + "argline": "git_oidarray *out, git_repository *repo, const git_oid *one, const git_oid *two", + "sig": "git_oidarray *::git_repository *::const git_oid *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if not found or error code" + }, + "description": "

Find merge bases between two commits

\n", + "comments": "", + "group": "merge" + }, + "git_merge_base_many": { + "type": "function", + "file": "merge.h", + "line": 421, + "lineto": 425, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "the OID of a merge base considering all the commits" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository where the commits exist" + }, + { + "name": "length", + "type": "size_t", + "comment": "The number of commits in the provided `input_array`" + }, + { + "name": "input_array", + "type": "const git_oid []", + "comment": "oids of the commits" + } + ], + "argline": "git_oid *out, git_repository *repo, size_t length, const git_oid [] input_array", + "sig": "git_oid *::git_repository *::size_t::const git_oid []", + "return": { + "type": "int", + "comment": " Zero on success; GIT_ENOTFOUND or -1 on failure." + }, + "description": "

Find a merge base given a list of commits

\n", + "comments": "", + "group": "merge" + }, + "git_merge_bases_many": { + "type": "function", + "file": "merge.h", + "line": 436, + "lineto": 440, + "args": [ + { + "name": "out", + "type": "git_oidarray *", + "comment": "array in which to store the resulting ids" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository where the commits exist" + }, + { + "name": "length", + "type": "size_t", + "comment": "The number of commits in the provided `input_array`" + }, + { + "name": "input_array", + "type": "const git_oid []", + "comment": "oids of the commits" + } + ], + "argline": "git_oidarray *out, git_repository *repo, size_t length, const git_oid [] input_array", + "sig": "git_oidarray *::git_repository *::size_t::const git_oid []", + "return": { + "type": "int", + "comment": " Zero on success; GIT_ENOTFOUND or -1 on failure." + }, + "description": "

Find all merge bases given a list of commits

\n", + "comments": "", + "group": "merge" + }, + "git_merge_base_octopus": { + "type": "function", + "file": "merge.h", + "line": 451, + "lineto": 455, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "the OID of a merge base considering all the commits" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository where the commits exist" + }, + { + "name": "length", + "type": "size_t", + "comment": "The number of commits in the provided `input_array`" + }, + { + "name": "input_array", + "type": "const git_oid []", + "comment": "oids of the commits" + } + ], + "argline": "git_oid *out, git_repository *repo, size_t length, const git_oid [] input_array", + "sig": "git_oid *::git_repository *::size_t::const git_oid []", + "return": { + "type": "int", + "comment": " Zero on success; GIT_ENOTFOUND or -1 on failure." + }, + "description": "

Find a merge base in preparation for an octopus merge

\n", + "comments": "", + "group": "merge" + }, + "git_merge_file": { + "type": "function", + "file": "merge.h", + "line": 473, + "lineto": 478, + "args": [ + { + "name": "out", + "type": "git_merge_file_result *", + "comment": "The git_merge_file_result to be filled in" + }, + { + "name": "ancestor", + "type": "const git_merge_file_input *", + "comment": "The contents of the ancestor file" + }, + { + "name": "ours", + "type": "const git_merge_file_input *", + "comment": "The contents of the file in \"our\" side" + }, + { + "name": "theirs", + "type": "const git_merge_file_input *", + "comment": "The contents of the file in \"their\" side" + }, + { + "name": "opts", + "type": "const git_merge_file_options *", + "comment": "The merge file options or `NULL` for defaults" + } + ], + "argline": "git_merge_file_result *out, const git_merge_file_input *ancestor, const git_merge_file_input *ours, const git_merge_file_input *theirs, const git_merge_file_options *opts", + "sig": "git_merge_file_result *::const git_merge_file_input *::const git_merge_file_input *::const git_merge_file_input *::const git_merge_file_options *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Merge two files as they exist in the in-memory data structures, using\n the given common ancestor as the baseline, producing a\n git_merge_file_result that reflects the merge result. The\n git_merge_file_result must be freed with git_merge_file_result_free.

\n", + "comments": "

Note that this function does not reference a repository and any configuration must be passed as git_merge_file_options.

\n", + "group": "merge" + }, + "git_merge_file_from_index": { + "type": "function", + "file": "merge.h", + "line": 494, + "lineto": 500, + "args": [ + { + "name": "out", + "type": "git_merge_file_result *", + "comment": "The git_merge_file_result to be filled in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository" + }, + { + "name": "ancestor", + "type": "const git_index_entry *", + "comment": "The index entry for the ancestor file (stage level 1)" + }, + { + "name": "ours", + "type": "const git_index_entry *", + "comment": "The index entry for our file (stage level 2)" + }, + { + "name": "theirs", + "type": "const git_index_entry *", + "comment": "The index entry for their file (stage level 3)" + }, + { + "name": "opts", + "type": "const git_merge_file_options *", + "comment": "The merge file options or NULL" + } + ], + "argline": "git_merge_file_result *out, git_repository *repo, const git_index_entry *ancestor, const git_index_entry *ours, const git_index_entry *theirs, const git_merge_file_options *opts", + "sig": "git_merge_file_result *::git_repository *::const git_index_entry *::const git_index_entry *::const git_index_entry *::const git_merge_file_options *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Merge two files as they exist in the index, using the given common\n ancestor as the baseline, producing a git_merge_file_result that\n reflects the merge result. The git_merge_file_result must be freed with\n git_merge_file_result_free.

\n", + "comments": "", + "group": "merge" + }, + "git_merge_file_result_free": { + "type": "function", + "file": "merge.h", + "line": 507, + "lineto": 507, + "args": [ + { + "name": "result", + "type": "git_merge_file_result *", + "comment": "The result to free or `NULL`" + } + ], + "argline": "git_merge_file_result *result", + "sig": "git_merge_file_result *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Frees a git_merge_file_result.

\n", + "comments": "", + "group": "merge" + }, + "git_merge_trees": { + "type": "function", + "file": "merge.h", + "line": 525, + "lineto": 531, + "args": [ + { + "name": "out", + "type": "git_index **", + "comment": "pointer to store the index result in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository that contains the given trees" + }, + { + "name": "ancestor_tree", + "type": "const git_tree *", + "comment": "the common ancestor between the trees (or null if none)" + }, + { + "name": "our_tree", + "type": "const git_tree *", + "comment": "the tree that reflects the destination tree" + }, + { + "name": "their_tree", + "type": "const git_tree *", + "comment": "the tree to merge in to `our_tree`" + }, + { + "name": "opts", + "type": "const git_merge_options *", + "comment": "the merge tree options (or null for defaults)" + } + ], + "argline": "git_index **out, git_repository *repo, const git_tree *ancestor_tree, const git_tree *our_tree, const git_tree *their_tree, const git_merge_options *opts", + "sig": "git_index **::git_repository *::const git_tree *::const git_tree *::const git_tree *::const git_merge_options *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Merge two trees, producing a git_index that reflects the result of\n the merge. The index may be written as-is to the working directory\n or checked out. If the index is to be converted to a tree, the caller\n should resolve any conflicts that arose as part of the merge.

\n", + "comments": "

The returned index must be freed explicitly with git_index_free.

\n", + "group": "merge" + }, + "git_merge_commits": { + "type": "function", + "file": "merge.h", + "line": 548, + "lineto": 553, + "args": [ + { + "name": "out", + "type": "git_index **", + "comment": "pointer to store the index result in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository that contains the given trees" + }, + { + "name": "our_commit", + "type": "const git_commit *", + "comment": "the commit that reflects the destination tree" + }, + { + "name": "their_commit", + "type": "const git_commit *", + "comment": "the commit to merge in to `our_commit`" + }, + { + "name": "opts", + "type": "const git_merge_options *", + "comment": "the merge tree options (or null for defaults)" + } + ], + "argline": "git_index **out, git_repository *repo, const git_commit *our_commit, const git_commit *their_commit, const git_merge_options *opts", + "sig": "git_index **::git_repository *::const git_commit *::const git_commit *::const git_merge_options *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Merge two commits, producing a git_index that reflects the result of\n the merge. The index may be written as-is to the working directory\n or checked out. If the index is to be converted to a tree, the caller\n should resolve any conflicts that arose as part of the merge.

\n", + "comments": "

The returned index must be freed explicitly with git_index_free.

\n", + "group": "merge" + }, + "git_merge": { + "type": "function", + "file": "merge.h", + "line": 573, + "lineto": 578, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to merge" + }, + { + "name": "their_heads", + "type": "const git_annotated_commit **", + "comment": "the heads to merge into" + }, + { + "name": "their_heads_len", + "type": "size_t", + "comment": "the number of heads to merge" + }, + { + "name": "merge_opts", + "type": "const git_merge_options *", + "comment": "merge options" + }, + { + "name": "checkout_opts", + "type": "const git_checkout_options *", + "comment": "checkout options" + } + ], + "argline": "git_repository *repo, const git_annotated_commit **their_heads, size_t their_heads_len, const git_merge_options *merge_opts, const git_checkout_options *checkout_opts", + "sig": "git_repository *::const git_annotated_commit **::size_t::const git_merge_options *::const git_checkout_options *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Merges the given commit(s) into HEAD, writing the results into the working\n directory. Any changes are staged for commit and any conflicts are written\n to the index. Callers should inspect the repository's index after this\n completes, resolve any conflicts and prepare a commit.

\n", + "comments": "

For compatibility with git, the repository is put into a merging state. Once the commit is done (or if the uses wishes to abort), you should clear this state by calling git_repository_state_cleanup().

\n", + "group": "merge" + }, + "git_message_prettify": { + "type": "function", + "file": "message.h", + "line": 39, + "lineto": 39, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "The user-allocated git_buf which will be filled with the\n cleaned up message." + }, + { + "name": "message", + "type": "const char *", + "comment": "The message to be prettified." + }, + { + "name": "strip_comments", + "type": "int", + "comment": "Non-zero to remove comment lines, 0 to leave them in." + }, + { + "name": "comment_char", + "type": "char", + "comment": "Comment character. Lines starting with this character\n are considered to be comments and removed if `strip_comments` is non-zero." + } + ], + "argline": "git_buf *out, const char *message, int strip_comments, char comment_char", + "sig": "git_buf *::const char *::int::char", + "return": { + "type": "int", + "comment": " 0 or an error code." + }, + "description": "

Clean up message from excess whitespace and make sure that the last line\n ends with a '

\n\n

'.

\n", + "comments": "

Optionally, can remove lines starting with a "#".

\n", + "group": "message" + }, + "git_note_iterator_new": { + "type": "function", + "file": "notes.h", + "line": 49, + "lineto": 52, + "args": [ + { + "name": "out", + "type": "git_note_iterator **", + "comment": "pointer to the iterator" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository where to look up the note" + }, + { + "name": "notes_ref", + "type": "const char *", + "comment": "canonical name of the reference to use (optional); defaults to\n \"refs/notes/commits\"" + } + ], + "argline": "git_note_iterator **out, git_repository *repo, const char *notes_ref", + "sig": "git_note_iterator **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Creates a new iterator for notes

\n", + "comments": "

The iterator must be freed manually by the user.

\n", + "group": "note" + }, + "git_note_iterator_free": { + "type": "function", + "file": "notes.h", + "line": 59, + "lineto": 59, + "args": [ + { + "name": "it", + "type": "git_note_iterator *", + "comment": "pointer to the iterator" + } + ], + "argline": "git_note_iterator *it", + "sig": "git_note_iterator *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Frees an git_note_iterator

\n", + "comments": "", + "group": "note" + }, + "git_note_next": { + "type": "function", + "file": "notes.h", + "line": 72, + "lineto": 75, + "args": [ + { + "name": "note_id", + "type": "git_oid *", + "comment": "id of blob containing the message" + }, + { + "name": "annotated_id", + "type": "git_oid *", + "comment": "id of the git object being annotated" + }, + { + "name": "it", + "type": "git_note_iterator *", + "comment": "pointer to the iterator" + } + ], + "argline": "git_oid *note_id, git_oid *annotated_id, git_note_iterator *it", + "sig": "git_oid *::git_oid *::git_note_iterator *", + "return": { + "type": "int", + "comment": " 0 (no error), GIT_ITEROVER (iteration is done) or an error code\n (negative value)" + }, + "description": "

Return the current item (note_id and annotated_id) and advance the iterator\n internally to the next value

\n", + "comments": "", + "group": "note" + }, + "git_note_read": { + "type": "function", + "file": "notes.h", + "line": 91, + "lineto": 95, + "args": [ + { + "name": "out", + "type": "git_note **", + "comment": "pointer to the read note; NULL in case of error" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository where to look up the note" + }, + { + "name": "notes_ref", + "type": "const char *", + "comment": "canonical name of the reference to use (optional); defaults to\n \"refs/notes/commits\"" + }, + { + "name": "oid", + "type": "const git_oid *", + "comment": "OID of the git object to read the note from" + } + ], + "argline": "git_note **out, git_repository *repo, const char *notes_ref, const git_oid *oid", + "sig": "git_note **::git_repository *::const char *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Read the note for an object

\n", + "comments": "

The note must be freed manually by the user.

\n", + "group": "note" + }, + "git_note_author": { + "type": "function", + "file": "notes.h", + "line": 103, + "lineto": 103, + "args": [ + { + "name": "note", + "type": "const git_note *", + "comment": "the note" + } + ], + "argline": "const git_note *note", + "sig": "const git_note *", + "return": { + "type": "const git_signature *", + "comment": " the author" + }, + "description": "

Get the note author

\n", + "comments": "", + "group": "note" + }, + "git_note_committer": { + "type": "function", + "file": "notes.h", + "line": 111, + "lineto": 111, + "args": [ + { + "name": "note", + "type": "const git_note *", + "comment": "the note" + } + ], + "argline": "const git_note *note", + "sig": "const git_note *", + "return": { + "type": "const git_signature *", + "comment": " the committer" + }, + "description": "

Get the note committer

\n", + "comments": "", + "group": "note" + }, + "git_note_message": { + "type": "function", + "file": "notes.h", + "line": 120, + "lineto": 120, + "args": [ + { + "name": "note", + "type": "const git_note *", + "comment": "the note" + } + ], + "argline": "const git_note *note", + "sig": "const git_note *", + "return": { + "type": "const char *", + "comment": " the note message" + }, + "description": "

Get the note message

\n", + "comments": "", + "group": "note" + }, + "git_note_id": { + "type": "function", + "file": "notes.h", + "line": 129, + "lineto": 129, + "args": [ + { + "name": "note", + "type": "const git_note *", + "comment": "the note" + } + ], + "argline": "const git_note *note", + "sig": "const git_note *", + "return": { + "type": "const git_oid *", + "comment": " the note object's id" + }, + "description": "

Get the note object's id

\n", + "comments": "", + "group": "note" + }, + "git_note_create": { + "type": "function", + "file": "notes.h", + "line": 146, + "lineto": 154, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "pointer to store the OID (optional); NULL in case of error" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository where to store the note" + }, + { + "name": "notes_ref", + "type": "const char *", + "comment": "canonical name of the reference to use (optional);\n\t\t\t\t\tdefaults to \"refs/notes/commits\"" + }, + { + "name": "author", + "type": "const git_signature *", + "comment": "signature of the notes commit author" + }, + { + "name": "committer", + "type": "const git_signature *", + "comment": "signature of the notes commit committer" + }, + { + "name": "oid", + "type": "const git_oid *", + "comment": "OID of the git object to decorate" + }, + { + "name": "note", + "type": "const char *", + "comment": "Content of the note to add for object oid" + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite existing note" + } + ], + "argline": "git_oid *out, git_repository *repo, const char *notes_ref, const git_signature *author, const git_signature *committer, const git_oid *oid, const char *note, int force", + "sig": "git_oid *::git_repository *::const char *::const git_signature *::const git_signature *::const git_oid *::const char *::int", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Add a note for an object

\n", + "comments": "", + "group": "note" + }, + "git_note_remove": { + "type": "function", + "file": "notes.h", + "line": 169, + "lineto": 174, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "repository where the note lives" + }, + { + "name": "notes_ref", + "type": "const char *", + "comment": "canonical name of the reference to use (optional);\n\t\t\t\t\tdefaults to \"refs/notes/commits\"" + }, + { + "name": "author", + "type": "const git_signature *", + "comment": "signature of the notes commit author" + }, + { + "name": "committer", + "type": "const git_signature *", + "comment": "signature of the notes commit committer" + }, + { + "name": "oid", + "type": "const git_oid *", + "comment": "OID of the git object to remove the note from" + } + ], + "argline": "git_repository *repo, const char *notes_ref, const git_signature *author, const git_signature *committer, const git_oid *oid", + "sig": "git_repository *::const char *::const git_signature *::const git_signature *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Remove the note for an object

\n", + "comments": "", + "group": "note" + }, + "git_note_free": { + "type": "function", + "file": "notes.h", + "line": 181, + "lineto": 181, + "args": [ + { + "name": "note", + "type": "git_note *", + "comment": "git_note object" + } + ], + "argline": "git_note *note", + "sig": "git_note *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a git_note object

\n", + "comments": "", + "group": "note" + }, + "git_note_foreach": { + "type": "function", + "file": "notes.h", + "line": 209, + "lineto": 213, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to find the notes." + }, + { + "name": "notes_ref", + "type": "const char *", + "comment": "Reference to read from (optional); defaults to\n \"refs/notes/commits\"." + }, + { + "name": "note_cb", + "type": "git_note_foreach_cb", + "comment": "Callback to invoke per found annotation. Return non-zero\n to stop looping." + }, + { + "name": "payload", + "type": "void *", + "comment": "Extra parameter to callback function." + } + ], + "argline": "git_repository *repo, const char *notes_ref, git_note_foreach_cb note_cb, void *payload", + "sig": "git_repository *::const char *::git_note_foreach_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Loop over all the notes within a specified namespace\n and issue a callback for each one.

\n", + "comments": "", + "group": "note" + }, + "git_object_lookup": { + "type": "function", + "file": "object.h", + "line": 42, + "lineto": 46, + "args": [ + { + "name": "object", + "type": "git_object **", + "comment": "pointer to the looked-up object" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to look up the object" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "the unique identifier for the object" + }, + { + "name": "type", + "type": "git_otype", + "comment": "the type of the object" + } + ], + "argline": "git_object **object, git_repository *repo, const git_oid *id, git_otype type", + "sig": "git_object **::git_repository *::const git_oid *::git_otype", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a reference to one of the objects in a repository.

\n", + "comments": "

The generated reference is owned by the repository and should be closed with the git_object_free method instead of free'd manually.

\n\n

The 'type' parameter must match the type of the object in the odb; the method will fail otherwise. The special value 'GIT_OBJ_ANY' may be passed to let the method guess the object's type.

\n", + "group": "object", + "examples": { + "log.c": [ + "ex/HEAD/log.html#git_object_lookup-33" + ] + } + }, + "git_object_lookup_prefix": { + "type": "function", + "file": "object.h", + "line": 75, + "lineto": 80, + "args": [ + { + "name": "object_out", + "type": "git_object **", + "comment": "pointer where to store the looked-up object" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to look up the object" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "a short identifier for the object" + }, + { + "name": "len", + "type": "size_t", + "comment": "the length of the short identifier" + }, + { + "name": "type", + "type": "git_otype", + "comment": "the type of the object" + } + ], + "argline": "git_object **object_out, git_repository *repo, const git_oid *id, size_t len, git_otype type", + "sig": "git_object **::git_repository *::const git_oid *::size_t::git_otype", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a reference to one of the objects in a repository,\n given a prefix of its identifier (short id).

\n", + "comments": "

The object obtained will be so that its identifier matches the first 'len' hexadecimal characters (packets of 4 bits) of the given 'id'. 'len' must be at least GIT_OID_MINPREFIXLEN, and long enough to identify a unique object matching the prefix; otherwise the method will fail.

\n\n

The generated reference is owned by the repository and should be closed with the git_object_free method instead of free'd manually.

\n\n

The 'type' parameter must match the type of the object in the odb; the method will fail otherwise. The special value 'GIT_OBJ_ANY' may be passed to let the method guess the object's type.

\n", + "group": "object" + }, + "git_object_lookup_bypath": { + "type": "function", + "file": "object.h", + "line": 93, + "lineto": 97, + "args": [ + { + "name": "out", + "type": "git_object **", + "comment": "buffer that receives a pointer to the object (which must be freed\n by the caller)" + }, + { + "name": "treeish", + "type": "const git_object *", + "comment": "root object that can be peeled to a tree" + }, + { + "name": "path", + "type": "const char *", + "comment": "relative path from the root object to the desired object" + }, + { + "name": "type", + "type": "git_otype", + "comment": "type of object desired" + } + ], + "argline": "git_object **out, const git_object *treeish, const char *path, git_otype type", + "sig": "git_object **::const git_object *::const char *::git_otype", + "return": { + "type": "int", + "comment": " 0 on success, or an error code" + }, + "description": "

Lookup an object that represents a tree entry.

\n", + "comments": "", + "group": "object" + }, + "git_object_id": { + "type": "function", + "file": "object.h", + "line": 105, + "lineto": 105, + "args": [ + { + "name": "obj", + "type": "const git_object *", + "comment": "the repository object" + } + ], + "argline": "const git_object *obj", + "sig": "const git_object *", + "return": { + "type": "const git_oid *", + "comment": " the SHA1 id" + }, + "description": "

Get the id (SHA1) of a repository object

\n", + "comments": "", + "group": "object", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_object_id-10", + "ex/HEAD/blame.html#git_object_id-11", + "ex/HEAD/blame.html#git_object_id-12", + "ex/HEAD/blame.html#git_object_id-13" + ], + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_object_id-12", + "ex/HEAD/cat-file.html#git_object_id-13" + ], + "log.c": [ + "ex/HEAD/log.html#git_object_id-34", + "ex/HEAD/log.html#git_object_id-35", + "ex/HEAD/log.html#git_object_id-36", + "ex/HEAD/log.html#git_object_id-37" + ], + "rev-parse.c": [ + "ex/HEAD/rev-parse.html#git_object_id-4", + "ex/HEAD/rev-parse.html#git_object_id-5", + "ex/HEAD/rev-parse.html#git_object_id-6", + "ex/HEAD/rev-parse.html#git_object_id-7", + "ex/HEAD/rev-parse.html#git_object_id-8" + ] + } + }, + "git_object_short_id": { + "type": "function", + "file": "object.h", + "line": 119, + "lineto": 119, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "Buffer to write string into" + }, + { + "name": "obj", + "type": "const git_object *", + "comment": "The object to get an ID for" + } + ], + "argline": "git_buf *out, const git_object *obj", + "sig": "git_buf *::const git_object *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 for error" + }, + "description": "

Get a short abbreviated OID string for the object

\n", + "comments": "

This starts at the "core.abbrev" length (default 7 characters) and iteratively extends to a longer string if that length is ambiguous. The result will be unambiguous (at least until new objects are added to the repository).

\n", + "group": "object", + "examples": { + "tag.c": [ + "ex/HEAD/tag.html#git_object_short_id-5" + ] + } + }, + "git_object_type": { + "type": "function", + "file": "object.h", + "line": 127, + "lineto": 127, + "args": [ + { + "name": "obj", + "type": "const git_object *", + "comment": "the repository object" + } + ], + "argline": "const git_object *obj", + "sig": "const git_object *", + "return": { + "type": "git_otype", + "comment": " the object's type" + }, + "description": "

Get the object type of an object

\n", + "comments": "", + "group": "object", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_object_type-14", + "ex/HEAD/cat-file.html#git_object_type-15", + "ex/HEAD/cat-file.html#git_object_type-16" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_object_type-6" + ] + } + }, + "git_object_owner": { + "type": "function", + "file": "object.h", + "line": 141, + "lineto": 141, + "args": [ + { + "name": "obj", + "type": "const git_object *", + "comment": "the object" + } + ], + "argline": "const git_object *obj", + "sig": "const git_object *", + "return": { + "type": "git_repository *", + "comment": " the repository who owns this object" + }, + "description": "

Get the repository that owns this object

\n", + "comments": "

Freeing or calling git_repository_close on the returned pointer will invalidate the actual object.

\n\n

Any other operation may be run on the repository without affecting the object.

\n", + "group": "object" + }, + "git_object_free": { + "type": "function", + "file": "object.h", + "line": 158, + "lineto": 158, + "args": [ + { + "name": "object", + "type": "git_object *", + "comment": "the object to close" + } + ], + "argline": "git_object *object", + "sig": "git_object *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Close an open object

\n", + "comments": "

This method instructs the library to close an existing object; note that git_objects are owned and cached by the repository so the object may or may not be freed after this library call, depending on how aggressive is the caching mechanism used by the repository.

\n\n

IMPORTANT: It is necessary to call this method when you stop using an object. Failure to do so will cause a memory leak.

\n", + "group": "object", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_object_free-14", + "ex/HEAD/blame.html#git_object_free-15", + "ex/HEAD/blame.html#git_object_free-16", + "ex/HEAD/blame.html#git_object_free-17" + ], + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_object_free-17" + ], + "general.c": [ + "ex/HEAD/general.html#git_object_free-32" + ], + "log.c": [ + "ex/HEAD/log.html#git_object_free-38" + ], + "rev-parse.c": [ + "ex/HEAD/rev-parse.html#git_object_free-9", + "ex/HEAD/rev-parse.html#git_object_free-10", + "ex/HEAD/rev-parse.html#git_object_free-11" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_object_free-7", + "ex/HEAD/tag.html#git_object_free-8", + "ex/HEAD/tag.html#git_object_free-9", + "ex/HEAD/tag.html#git_object_free-10" + ] + } + }, + "git_object_type2string": { + "type": "function", + "file": "object.h", + "line": 169, + "lineto": 169, + "args": [ + { + "name": "type", + "type": "git_otype", + "comment": "object type to convert." + } + ], + "argline": "git_otype type", + "sig": "git_otype", + "return": { + "type": "const char *", + "comment": " the corresponding string representation." + }, + "description": "

Convert an object type to its string representation.

\n", + "comments": "

The result is a pointer to a string in static memory and should not be free()'ed.

\n", + "group": "object", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_object_type2string-18", + "ex/HEAD/cat-file.html#git_object_type2string-19", + "ex/HEAD/cat-file.html#git_object_type2string-20", + "ex/HEAD/cat-file.html#git_object_type2string-21" + ], + "general.c": [ + "ex/HEAD/general.html#git_object_type2string-33" + ] + } + }, + "git_object_string2type": { + "type": "function", + "file": "object.h", + "line": 177, + "lineto": 177, + "args": [ + { + "name": "str", + "type": "const char *", + "comment": "the string to convert." + } + ], + "argline": "const char *str", + "sig": "const char *", + "return": { + "type": "git_otype", + "comment": " the corresponding git_otype." + }, + "description": "

Convert a string object type representation to its git_otype.

\n", + "comments": "", + "group": "object" + }, + "git_object_typeisloose": { + "type": "function", + "file": "object.h", + "line": 186, + "lineto": 186, + "args": [ + { + "name": "type", + "type": "git_otype", + "comment": "object type to test." + } + ], + "argline": "git_otype type", + "sig": "git_otype", + "return": { + "type": "int", + "comment": " true if the type represents a valid loose object type,\n false otherwise." + }, + "description": "

Determine if the given git_otype is a valid loose object type.

\n", + "comments": "", + "group": "object" + }, + "git_object__size": { + "type": "function", + "file": "object.h", + "line": 200, + "lineto": 200, + "args": [ + { + "name": "type", + "type": "git_otype", + "comment": "object type to get its size" + } + ], + "argline": "git_otype type", + "sig": "git_otype", + "return": { + "type": "size_t", + "comment": " size in bytes of the object" + }, + "description": "

Get the size in bytes for the structure which\n acts as an in-memory representation of any given\n object type.

\n", + "comments": "

For all the core types, this would be the equivalent of calling sizeof(git_commit) if the core types were not opaque on the external API.

\n", + "group": "object" + }, + "git_object_peel": { + "type": "function", + "file": "object.h", + "line": 225, + "lineto": 228, + "args": [ + { + "name": "peeled", + "type": "git_object **", + "comment": "Pointer to the peeled git_object" + }, + { + "name": "object", + "type": "const git_object *", + "comment": "The object to be processed" + }, + { + "name": "target_type", + "type": "git_otype", + "comment": "The type of the requested object (a GIT_OBJ_ value)" + } + ], + "argline": "git_object **peeled, const git_object *object, git_otype target_type", + "sig": "git_object **::const git_object *::git_otype", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EINVALIDSPEC, GIT_EPEEL, or an error code" + }, + "description": "

Recursively peel an object until an object of the specified type is met.

\n", + "comments": "

If the query cannot be satisfied due to the object model, GIT_EINVALIDSPEC will be returned (e.g. trying to peel a blob to a tree).

\n\n

If you pass GIT_OBJ_ANY as the target type, then the object will be peeled until the type changes. A tag will be peeled until the referenced object is no longer a tag, and a commit will be peeled to a tree. Any other object type will return GIT_EINVALIDSPEC.

\n\n

If peeling a tag we discover an object which cannot be peeled to the target type due to the object model, GIT_EPEEL will be returned.

\n\n

You must free the returned object.

\n", + "group": "object" + }, + "git_object_dup": { + "type": "function", + "file": "object.h", + "line": 237, + "lineto": 237, + "args": [ + { + "name": "dest", + "type": "git_object **", + "comment": "Pointer to store the copy of the object" + }, + { + "name": "source", + "type": "git_object *", + "comment": "Original object to copy" + } + ], + "argline": "git_object **dest, git_object *source", + "sig": "git_object **::git_object *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create an in-memory copy of a Git object. The copy must be\n explicitly free'd or it will leak.

\n", + "comments": "", + "group": "object" + }, + "git_odb_new": { + "type": "function", + "file": "odb.h", + "line": 39, + "lineto": 39, + "args": [ + { + "name": "out", + "type": "git_odb **", + "comment": "location to store the database pointer, if opened.\n\t\t\tSet to NULL if the open failed." + } + ], + "argline": "git_odb **out", + "sig": "git_odb **", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a new object database with no backends.

\n", + "comments": "

Before the ODB can be used for read/writing, a custom database backend must be manually added using git_odb_add_backend()

\n", + "group": "odb" + }, + "git_odb_open": { + "type": "function", + "file": "odb.h", + "line": 57, + "lineto": 57, + "args": [ + { + "name": "out", + "type": "git_odb **", + "comment": "location to store the database pointer, if opened.\n\t\t\tSet to NULL if the open failed." + }, + { + "name": "objects_dir", + "type": "const char *", + "comment": "path of the backends' \"objects\" directory." + } + ], + "argline": "git_odb **out, const char *objects_dir", + "sig": "git_odb **::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a new object database and automatically add\n the two default backends:

\n", + "comments": "
- git_odb_backend_loose: read and write loose object files      from disk, assuming `objects_dir` as the Objects folder\n\n- git_odb_backend_pack: read objects from packfiles,        assuming `objects_dir` as the Objects folder which      contains a 'pack/' folder with the corresponding data\n
\n", + "group": "odb" + }, + "git_odb_add_disk_alternate": { + "type": "function", + "file": "odb.h", + "line": 74, + "lineto": 74, + "args": [ + { + "name": "odb", + "type": "git_odb *", + "comment": "database to add the backend to" + }, + { + "name": "path", + "type": "const char *", + "comment": "path to the objects folder for the alternate" + } + ], + "argline": "git_odb *odb, const char *path", + "sig": "git_odb *::const char *", + "return": { + "type": "int", + "comment": " 0 on success; error code otherwise" + }, + "description": "

Add an on-disk alternate to an existing Object DB.

\n", + "comments": "

Note that the added path must point to an objects folder, not to a full repository, to use it as an alternate store.

\n\n

Alternate backends are always checked for objects after all the main backends have been exhausted.

\n\n

Writing is disabled on alternate backends.

\n", + "group": "odb" + }, + "git_odb_free": { + "type": "function", + "file": "odb.h", + "line": 81, + "lineto": 81, + "args": [ + { + "name": "db", + "type": "git_odb *", + "comment": "database pointer to close. If NULL no action is taken." + } + ], + "argline": "git_odb *db", + "sig": "git_odb *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Close an open object database.

\n", + "comments": "", + "group": "odb", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_odb_free-22" + ] + } + }, + "git_odb_read": { + "type": "function", + "file": "odb.h", + "line": 100, + "lineto": 100, + "args": [ + { + "name": "out", + "type": "git_odb_object **", + "comment": "pointer where to store the read object" + }, + { + "name": "db", + "type": "git_odb *", + "comment": "database to search for the object in." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "identity of the object to read." + } + ], + "argline": "git_odb_object **out, git_odb *db, const git_oid *id", + "sig": "git_odb_object **::git_odb *::const git_oid *", + "return": { + "type": "int", + "comment": " - 0 if the object was read;\n - GIT_ENOTFOUND if the object is not in the database." + }, + "description": "

Read an object from the database.

\n", + "comments": "

This method queries all available ODB backends trying to read the given OID.

\n\n

The returned object is reference counted and internally cached, so it should be closed by the user once it's no longer in use.

\n", + "group": "odb", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_odb_read-23" + ], + "general.c": [ + "ex/HEAD/general.html#git_odb_read-34" + ] + } + }, + "git_odb_read_prefix": { + "type": "function", + "file": "odb.h", + "line": 129, + "lineto": 129, + "args": [ + { + "name": "out", + "type": "git_odb_object **", + "comment": "pointer where to store the read object" + }, + { + "name": "db", + "type": "git_odb *", + "comment": "database to search for the object in." + }, + { + "name": "short_id", + "type": "const git_oid *", + "comment": "a prefix of the id of the object to read." + }, + { + "name": "len", + "type": "size_t", + "comment": "the length of the prefix" + } + ], + "argline": "git_odb_object **out, git_odb *db, const git_oid *short_id, size_t len", + "sig": "git_odb_object **::git_odb *::const git_oid *::size_t", + "return": { + "type": "int", + "comment": " - 0 if the object was read;\n - GIT_ENOTFOUND if the object is not in the database.\n - GIT_EAMBIGUOUS if the prefix is ambiguous (several objects match the prefix)" + }, + "description": "

Read an object from the database, given a prefix\n of its identifier.

\n", + "comments": "

This method queries all available ODB backends trying to match the 'len' first hexadecimal characters of the 'short_id'. The remaining (GIT_OID_HEXSZ-len)*4 bits of 'short_id' must be 0s. 'len' must be at least GIT_OID_MINPREFIXLEN, and the prefix must be long enough to identify a unique object in all the backends; the method will fail otherwise.

\n\n

The returned object is reference counted and internally cached, so it should be closed by the user once it's no longer in use.

\n", + "group": "odb" + }, + "git_odb_read_header": { + "type": "function", + "file": "odb.h", + "line": 149, + "lineto": 149, + "args": [ + { + "name": "len_out", + "type": "size_t *", + "comment": "pointer where to store the length" + }, + { + "name": "type_out", + "type": "git_otype *", + "comment": "pointer where to store the type" + }, + { + "name": "db", + "type": "git_odb *", + "comment": "database to search for the object in." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "identity of the object to read." + } + ], + "argline": "size_t *len_out, git_otype *type_out, git_odb *db, const git_oid *id", + "sig": "size_t *::git_otype *::git_odb *::const git_oid *", + "return": { + "type": "int", + "comment": " - 0 if the object was read;\n - GIT_ENOTFOUND if the object is not in the database." + }, + "description": "

Read the header of an object from the database, without\n reading its full contents.

\n", + "comments": "

The header includes the length and the type of an object.

\n\n

Note that most backends do not support reading only the header of an object, so the whole object will be read and then the header will be returned.

\n", + "group": "odb" + }, + "git_odb_exists": { + "type": "function", + "file": "odb.h", + "line": 160, + "lineto": 160, + "args": [ + { + "name": "db", + "type": "git_odb *", + "comment": "database to be searched for the given object." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "the object to search for." + } + ], + "argline": "git_odb *db, const git_oid *id", + "sig": "git_odb *::const git_oid *", + "return": { + "type": "int", + "comment": " - 1, if the object was found\n - 0, otherwise" + }, + "description": "

Determine if the given object can be found in the object database.

\n", + "comments": "", + "group": "odb" + }, + "git_odb_exists_prefix": { + "type": "function", + "file": "odb.h", + "line": 173, + "lineto": 174, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "The full OID of the found object if just one is found." + }, + { + "name": "db", + "type": "git_odb *", + "comment": "The database to be searched for the given object." + }, + { + "name": "short_id", + "type": "const git_oid *", + "comment": "A prefix of the id of the object to read." + }, + { + "name": "len", + "type": "size_t", + "comment": "The length of the prefix." + } + ], + "argline": "git_oid *out, git_odb *db, const git_oid *short_id, size_t len", + "sig": "git_oid *::git_odb *::const git_oid *::size_t", + "return": { + "type": "int", + "comment": " 0 if found, GIT_ENOTFOUND if not found, GIT_EAMBIGUOUS if multiple\n matches were found, other value \n<\n 0 if there was a read error." + }, + "description": "

Determine if an object can be found in the object database by an\n abbreviated object ID.

\n", + "comments": "", + "group": "odb" + }, + "git_odb_expand_ids": { + "type": "function", + "file": "odb.h", + "line": 215, + "lineto": 218, + "args": [ + { + "name": "db", + "type": "git_odb *", + "comment": "The database to be searched for the given objects." + }, + { + "name": "ids", + "type": "git_odb_expand_id *", + "comment": "An array of short object IDs to search for" + }, + { + "name": "count", + "type": "size_t", + "comment": "The length of the `ids` array" + } + ], + "argline": "git_odb *db, git_odb_expand_id *ids, size_t count", + "sig": "git_odb *::git_odb_expand_id *::size_t", + "return": { + "type": "int", + "comment": " 0 on success or an error code on failure" + }, + "description": "

Determine if one or more objects can be found in the object database\n by their abbreviated object ID and type. The given array will be\n updated in place: for each abbreviated ID that is unique in the\n database, and of the given type (if specified), the full object ID,\n object ID length (GIT_OID_HEXSZ) and type will be written back to\n the array. For IDs that are not found (or are ambiguous), the\n array entry will be zeroed.

\n", + "comments": "

Note that since this function operates on multiple objects, the underlying database will not be asked to be reloaded if an object is not found (which is unlike other object database operations.)

\n", + "group": "odb" + }, + "git_odb_refresh": { + "type": "function", + "file": "odb.h", + "line": 238, + "lineto": 238, + "args": [ + { + "name": "db", + "type": "struct git_odb *", + "comment": "database to refresh" + } + ], + "argline": "struct git_odb *db", + "sig": "struct git_odb *", + "return": { + "type": "int", + "comment": " 0 on success, error code otherwise" + }, + "description": "

Refresh the object database to load newly added files.

\n", + "comments": "

If the object databases have changed on disk while the library is running, this function will force a reload of the underlying indexes.

\n\n

Use this function when you're confident that an external application has tampered with the ODB.

\n\n

NOTE that it is not necessary to call this function at all. The library will automatically attempt to refresh the ODB when a lookup fails, to see if the looked up object exists on disk but hasn't been loaded yet.

\n", + "group": "odb" + }, + "git_odb_foreach": { + "type": "function", + "file": "odb.h", + "line": 253, + "lineto": 253, + "args": [ + { + "name": "db", + "type": "git_odb *", + "comment": "database to use" + }, + { + "name": "cb", + "type": "git_odb_foreach_cb", + "comment": "the callback to call for each object" + }, + { + "name": "payload", + "type": "void *", + "comment": "data to pass to the callback" + } + ], + "argline": "git_odb *db, git_odb_foreach_cb cb, void *payload", + "sig": "git_odb *::git_odb_foreach_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

List all objects available in the database

\n", + "comments": "

The callback will be called for each object available in the database. Note that the objects are likely to be returned in the index order, which would make accessing the objects in that order inefficient. Return a non-zero value from the callback to stop looping.

\n", + "group": "odb" + }, + "git_odb_write": { + "type": "function", + "file": "odb.h", + "line": 273, + "lineto": 273, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "pointer to store the OID result of the write" + }, + { + "name": "odb", + "type": "git_odb *", + "comment": "object database where to store the object" + }, + { + "name": "data", + "type": "const void *", + "comment": "buffer with the data to store" + }, + { + "name": "len", + "type": "size_t", + "comment": "size of the buffer" + }, + { + "name": "type", + "type": "git_otype", + "comment": "type of the data to store" + } + ], + "argline": "git_oid *out, git_odb *odb, const void *data, size_t len, git_otype type", + "sig": "git_oid *::git_odb *::const void *::size_t::git_otype", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Write an object directly into the ODB

\n", + "comments": "

This method writes a full object straight into the ODB. For most cases, it is preferred to write objects through a write stream, which is both faster and less memory intensive, specially for big objects.

\n\n

This method is provided for compatibility with custom backends which are not able to support streaming writes

\n", + "group": "odb", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_odb_write-35" + ] + } + }, + "git_odb_open_wstream": { + "type": "function", + "file": "odb.h", + "line": 296, + "lineto": 296, + "args": [ + { + "name": "out", + "type": "git_odb_stream **", + "comment": "pointer where to store the stream" + }, + { + "name": "db", + "type": "git_odb *", + "comment": "object database where the stream will write" + }, + { + "name": "size", + "type": "git_off_t", + "comment": "final size of the object that will be written" + }, + { + "name": "type", + "type": "git_otype", + "comment": "type of the object that will be written" + } + ], + "argline": "git_odb_stream **out, git_odb *db, git_off_t size, git_otype type", + "sig": "git_odb_stream **::git_odb *::git_off_t::git_otype", + "return": { + "type": "int", + "comment": " 0 if the stream was created; error code otherwise" + }, + "description": "

Open a stream to write an object into the ODB

\n", + "comments": "

The type and final length of the object must be specified when opening the stream.

\n\n

The returned stream will be of type GIT_STREAM_WRONLY, and it won't be effective until git_odb_stream_finalize_write is called and returns without an error

\n\n

The stream must always be freed when done with git_odb_stream_free or will leak memory.

\n", + "group": "odb" + }, + "git_odb_stream_write": { + "type": "function", + "file": "odb.h", + "line": 309, + "lineto": 309, + "args": [ + { + "name": "stream", + "type": "git_odb_stream *", + "comment": "the stream" + }, + { + "name": "buffer", + "type": "const char *", + "comment": "the data to write" + }, + { + "name": "len", + "type": "size_t", + "comment": "the buffer's length" + } + ], + "argline": "git_odb_stream *stream, const char *buffer, size_t len", + "sig": "git_odb_stream *::const char *::size_t", + "return": { + "type": "int", + "comment": " 0 if the write succeeded; error code otherwise" + }, + "description": "

Write to an odb stream

\n", + "comments": "

This method will fail if the total number of received bytes exceeds the size declared with git_odb_open_wstream()

\n", + "group": "odb" + }, + "git_odb_stream_finalize_write": { + "type": "function", + "file": "odb.h", + "line": 324, + "lineto": 324, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "pointer to store the resulting object's id" + }, + { + "name": "stream", + "type": "git_odb_stream *", + "comment": "the stream" + } + ], + "argline": "git_oid *out, git_odb_stream *stream", + "sig": "git_oid *::git_odb_stream *", + "return": { + "type": "int", + "comment": " 0 on success; an error code otherwise" + }, + "description": "

Finish writing to an odb stream

\n", + "comments": "

The object will take its final name and will be available to the odb.

\n\n

This method will fail if the total number of received bytes differs from the size declared with git_odb_open_wstream()

\n", + "group": "odb" + }, + "git_odb_stream_read": { + "type": "function", + "file": "odb.h", + "line": 331, + "lineto": 331, + "args": [ + { + "name": "stream", + "type": "git_odb_stream *", + "comment": null + }, + { + "name": "buffer", + "type": "char *", + "comment": null + }, + { + "name": "len", + "type": "size_t", + "comment": null + } + ], + "argline": "git_odb_stream *stream, char *buffer, size_t len", + "sig": "git_odb_stream *::char *::size_t", + "return": { + "type": "int", + "comment": null + }, + "description": "

Read from an odb stream

\n", + "comments": "

Most backends don't implement streaming reads

\n", + "group": "odb" + }, + "git_odb_stream_free": { + "type": "function", + "file": "odb.h", + "line": 338, + "lineto": 338, + "args": [ + { + "name": "stream", + "type": "git_odb_stream *", + "comment": "the stream to free" + } + ], + "argline": "git_odb_stream *stream", + "sig": "git_odb_stream *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free an odb stream

\n", + "comments": "", + "group": "odb" + }, + "git_odb_open_rstream": { + "type": "function", + "file": "odb.h", + "line": 364, + "lineto": 364, + "args": [ + { + "name": "out", + "type": "git_odb_stream **", + "comment": "pointer where to store the stream" + }, + { + "name": "db", + "type": "git_odb *", + "comment": "object database where the stream will read from" + }, + { + "name": "oid", + "type": "const git_oid *", + "comment": "oid of the object the stream will read from" + } + ], + "argline": "git_odb_stream **out, git_odb *db, const git_oid *oid", + "sig": "git_odb_stream **::git_odb *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 if the stream was created; error code otherwise" + }, + "description": "

Open a stream to read an object from the ODB

\n", + "comments": "

Note that most backends do not support streaming reads because they store their objects as compressed/delta'ed blobs.

\n\n

It's recommended to use git_odb_read instead, which is assured to work on all backends.

\n\n

The returned stream will be of type GIT_STREAM_RDONLY and will have the following methods:

\n\n
    - stream->read: read `n` bytes from the stream      - stream->free: free the stream\n
\n\n

The stream must always be free'd or will leak memory.

\n", + "group": "odb" + }, + "git_odb_write_pack": { + "type": "function", + "file": "odb.h", + "line": 384, + "lineto": 388, + "args": [ + { + "name": "out", + "type": "git_odb_writepack **", + "comment": "pointer to the writepack functions" + }, + { + "name": "db", + "type": "git_odb *", + "comment": "object database where the stream will read from" + }, + { + "name": "progress_cb", + "type": "git_transfer_progress_cb", + "comment": "function to call with progress information.\n Be aware that this is called inline with network and indexing operations,\n so performance may be affected." + }, + { + "name": "progress_payload", + "type": "void *", + "comment": "payload for the progress callback" + } + ], + "argline": "git_odb_writepack **out, git_odb *db, git_transfer_progress_cb progress_cb, void *progress_payload", + "sig": "git_odb_writepack **::git_odb *::git_transfer_progress_cb::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Open a stream for writing a pack file to the ODB.

\n", + "comments": "

If the ODB layer understands pack files, then the given packfile will likely be streamed directly to disk (and a corresponding index created). If the ODB layer does not understand pack files, the objects will be stored in whatever format the ODB layer uses.

\n", + "group": "odb" + }, + "git_odb_hash": { + "type": "function", + "file": "odb.h", + "line": 402, + "lineto": 402, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "the resulting object-ID." + }, + { + "name": "data", + "type": "const void *", + "comment": "data to hash" + }, + { + "name": "len", + "type": "size_t", + "comment": "size of the data" + }, + { + "name": "type", + "type": "git_otype", + "comment": "of the data to hash" + } + ], + "argline": "git_oid *out, const void *data, size_t len, git_otype type", + "sig": "git_oid *::const void *::size_t::git_otype", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Determine the object-ID (sha1 hash) of a data buffer

\n", + "comments": "

The resulting SHA-1 OID will be the identifier for the data buffer as if the data buffer it were to written to the ODB.

\n", + "group": "odb" + }, + "git_odb_hashfile": { + "type": "function", + "file": "odb.h", + "line": 417, + "lineto": 417, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "oid structure the result is written into." + }, + { + "name": "path", + "type": "const char *", + "comment": "file to read and determine object id for" + }, + { + "name": "type", + "type": "git_otype", + "comment": "the type of the object that will be hashed" + } + ], + "argline": "git_oid *out, const char *path, git_otype type", + "sig": "git_oid *::const char *::git_otype", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Read a file from disk and fill a git_oid with the object id\n that the file would have if it were written to the Object\n Database as an object of the given type (w/o applying filters).\n Similar functionality to git.git's git hash-object without\n the -w flag, however, with the --no-filters flag.\n If you need filters, see git_repository_hashfile.

\n", + "comments": "", + "group": "odb" + }, + "git_odb_object_dup": { + "type": "function", + "file": "odb.h", + "line": 431, + "lineto": 431, + "args": [ + { + "name": "dest", + "type": "git_odb_object **", + "comment": "pointer where to store the copy" + }, + { + "name": "source", + "type": "git_odb_object *", + "comment": "object to copy" + } + ], + "argline": "git_odb_object **dest, git_odb_object *source", + "sig": "git_odb_object **::git_odb_object *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a copy of an odb_object

\n", + "comments": "

The returned copy must be manually freed with git_odb_object_free. Note that because of an implementation detail, the returned copy will be the same pointer as source: the object is internally refcounted, so the copy still needs to be freed twice.

\n", + "group": "odb" + }, + "git_odb_object_free": { + "type": "function", + "file": "odb.h", + "line": 441, + "lineto": 441, + "args": [ + { + "name": "object", + "type": "git_odb_object *", + "comment": "object to close" + } + ], + "argline": "git_odb_object *object", + "sig": "git_odb_object *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Close an ODB object

\n", + "comments": "

This method must always be called once a git_odb_object is no longer needed, otherwise memory will leak.

\n", + "group": "odb", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_odb_object_free-24" + ], + "general.c": [ + "ex/HEAD/general.html#git_odb_object_free-36" + ] + } + }, + "git_odb_object_id": { + "type": "function", + "file": "odb.h", + "line": 451, + "lineto": 451, + "args": [ + { + "name": "object", + "type": "git_odb_object *", + "comment": "the object" + } + ], + "argline": "git_odb_object *object", + "sig": "git_odb_object *", + "return": { + "type": "const git_oid *", + "comment": " a pointer to the OID" + }, + "description": "

Return the OID of an ODB object

\n", + "comments": "

This is the OID from which the object was read from

\n", + "group": "odb" + }, + "git_odb_object_data": { + "type": "function", + "file": "odb.h", + "line": 464, + "lineto": 464, + "args": [ + { + "name": "object", + "type": "git_odb_object *", + "comment": "the object" + } + ], + "argline": "git_odb_object *object", + "sig": "git_odb_object *", + "return": { + "type": "const void *", + "comment": " a pointer to the data" + }, + "description": "

Return the data of an ODB object

\n", + "comments": "

This is the uncompressed, raw data as read from the ODB, without the leading header.

\n\n

This pointer is owned by the object and shall not be free'd.

\n", + "group": "odb", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_odb_object_data-37" + ] + } + }, + "git_odb_object_size": { + "type": "function", + "file": "odb.h", + "line": 475, + "lineto": 475, + "args": [ + { + "name": "object", + "type": "git_odb_object *", + "comment": "the object" + } + ], + "argline": "git_odb_object *object", + "sig": "git_odb_object *", + "return": { + "type": "size_t", + "comment": " the size" + }, + "description": "

Return the size of an ODB object

\n", + "comments": "

This is the real size of the data buffer, not the actual size of the object.

\n", + "group": "odb", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_odb_object_size-25" + ], + "general.c": [ + "ex/HEAD/general.html#git_odb_object_size-38" + ] + } + }, + "git_odb_object_type": { + "type": "function", + "file": "odb.h", + "line": 483, + "lineto": 483, + "args": [ + { + "name": "object", + "type": "git_odb_object *", + "comment": "the object" + } + ], + "argline": "git_odb_object *object", + "sig": "git_odb_object *", + "return": { + "type": "git_otype", + "comment": " the type" + }, + "description": "

Return the type of an ODB object

\n", + "comments": "", + "group": "odb", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_odb_object_type-39" + ] + } + }, + "git_odb_add_backend": { + "type": "function", + "file": "odb.h", + "line": 498, + "lineto": 498, + "args": [ + { + "name": "odb", + "type": "git_odb *", + "comment": "database to add the backend to" + }, + { + "name": "backend", + "type": "git_odb_backend *", + "comment": "pointer to a git_odb_backend instance" + }, + { + "name": "priority", + "type": "int", + "comment": "Value for ordering the backends queue" + } + ], + "argline": "git_odb *odb, git_odb_backend *backend, int priority", + "sig": "git_odb *::git_odb_backend *::int", + "return": { + "type": "int", + "comment": " 0 on success; error code otherwise" + }, + "description": "

Add a custom backend to an existing Object DB

\n", + "comments": "

The backends are checked in relative ordering, based on the value of the priority parameter.

\n\n

Read for more information.

\n", + "group": "odb" + }, + "git_odb_add_alternate": { + "type": "function", + "file": "odb.h", + "line": 519, + "lineto": 519, + "args": [ + { + "name": "odb", + "type": "git_odb *", + "comment": "database to add the backend to" + }, + { + "name": "backend", + "type": "git_odb_backend *", + "comment": "pointer to a git_odb_backend instance" + }, + { + "name": "priority", + "type": "int", + "comment": "Value for ordering the backends queue" + } + ], + "argline": "git_odb *odb, git_odb_backend *backend, int priority", + "sig": "git_odb *::git_odb_backend *::int", + "return": { + "type": "int", + "comment": " 0 on success; error code otherwise" + }, + "description": "

Add a custom backend to an existing Object DB; this\n backend will work as an alternate.

\n", + "comments": "

Alternate backends are always checked for objects after all the main backends have been exhausted.

\n\n

The backends are checked in relative ordering, based on the value of the priority parameter.

\n\n

Writing is disabled on alternate backends.

\n\n

Read for more information.

\n", + "group": "odb" + }, + "git_odb_num_backends": { + "type": "function", + "file": "odb.h", + "line": 527, + "lineto": 527, + "args": [ + { + "name": "odb", + "type": "git_odb *", + "comment": "object database" + } + ], + "argline": "git_odb *odb", + "sig": "git_odb *", + "return": { + "type": "size_t", + "comment": " number of backends in the ODB" + }, + "description": "

Get the number of ODB backend objects

\n", + "comments": "", + "group": "odb" + }, + "git_odb_get_backend": { + "type": "function", + "file": "odb.h", + "line": 537, + "lineto": 537, + "args": [ + { + "name": "out", + "type": "git_odb_backend **", + "comment": "output pointer to ODB backend at pos" + }, + { + "name": "odb", + "type": "git_odb *", + "comment": "object database" + }, + { + "name": "pos", + "type": "size_t", + "comment": "index into object database backend list" + } + ], + "argline": "git_odb_backend **out, git_odb *odb, size_t pos", + "sig": "git_odb_backend **::git_odb *::size_t", + "return": { + "type": "int", + "comment": " 0 on success; GIT_ENOTFOUND if pos is invalid; other errors \n<\n 0" + }, + "description": "

Lookup an ODB backend object by index

\n", + "comments": "", + "group": "odb" + }, + "git_odb_backend_pack": { + "type": "function", + "file": "odb_backend.h", + "line": 34, + "lineto": 34, + "args": [ + { + "name": "out", + "type": "git_odb_backend **", + "comment": "location to store the odb backend pointer" + }, + { + "name": "objects_dir", + "type": "const char *", + "comment": "the Git repository's objects directory" + } + ], + "argline": "git_odb_backend **out, const char *objects_dir", + "sig": "git_odb_backend **::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a backend for the packfiles.

\n", + "comments": "", + "group": "odb" + }, + "git_odb_backend_loose": { + "type": "function", + "file": "odb_backend.h", + "line": 48, + "lineto": 54, + "args": [ + { + "name": "out", + "type": "git_odb_backend **", + "comment": "location to store the odb backend pointer" + }, + { + "name": "objects_dir", + "type": "const char *", + "comment": "the Git repository's objects directory" + }, + { + "name": "compression_level", + "type": "int", + "comment": "zlib compression level to use" + }, + { + "name": "do_fsync", + "type": "int", + "comment": "whether to do an fsync() after writing (currently ignored)" + }, + { + "name": "dir_mode", + "type": "unsigned int", + "comment": "permissions to use creating a directory or 0 for defaults" + }, + { + "name": "file_mode", + "type": "unsigned int", + "comment": "permissions to use creating a file or 0 for defaults" + } + ], + "argline": "git_odb_backend **out, const char *objects_dir, int compression_level, int do_fsync, unsigned int dir_mode, unsigned int file_mode", + "sig": "git_odb_backend **::const char *::int::int::unsigned int::unsigned int", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a backend for loose objects

\n", + "comments": "", + "group": "odb" + }, + "git_odb_backend_one_pack": { + "type": "function", + "file": "odb_backend.h", + "line": 67, + "lineto": 67, + "args": [ + { + "name": "out", + "type": "git_odb_backend **", + "comment": "location to store the odb backend pointer" + }, + { + "name": "index_file", + "type": "const char *", + "comment": "path to the packfile's .idx file" + } + ], + "argline": "git_odb_backend **out, const char *index_file", + "sig": "git_odb_backend **::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a backend out of a single packfile

\n", + "comments": "

This can be useful for inspecting the contents of a single packfile.

\n", + "group": "odb" + }, + "git_oid_fromstr": { + "type": "function", + "file": "oid.h", + "line": 47, + "lineto": 47, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "oid structure the result is written into." + }, + { + "name": "str", + "type": "const char *", + "comment": "input hex string; must be pointing at the start of\n\t\tthe hex sequence and have at least the number of bytes\n\t\tneeded for an oid encoded in hex (40 bytes)." + } + ], + "argline": "git_oid *out, const char *str", + "sig": "git_oid *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Parse a hex formatted object id into a git_oid.

\n", + "comments": "", + "group": "oid", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_oid_fromstr-40", + "ex/HEAD/general.html#git_oid_fromstr-41", + "ex/HEAD/general.html#git_oid_fromstr-42", + "ex/HEAD/general.html#git_oid_fromstr-43", + "ex/HEAD/general.html#git_oid_fromstr-44", + "ex/HEAD/general.html#git_oid_fromstr-45", + "ex/HEAD/general.html#git_oid_fromstr-46", + "ex/HEAD/general.html#git_oid_fromstr-47" + ] + } + }, + "git_oid_fromstrp": { + "type": "function", + "file": "oid.h", + "line": 57, + "lineto": 57, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "oid structure the result is written into." + }, + { + "name": "str", + "type": "const char *", + "comment": "input hex string; must be at least 4 characters\n long and null-terminated." + } + ], + "argline": "git_oid *out, const char *str", + "sig": "git_oid *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Parse a hex formatted null-terminated string into a git_oid.

\n", + "comments": "", + "group": "oid" + }, + "git_oid_fromstrn": { + "type": "function", + "file": "oid.h", + "line": 70, + "lineto": 70, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "oid structure the result is written into." + }, + { + "name": "str", + "type": "const char *", + "comment": "input hex string of at least size `length`" + }, + { + "name": "length", + "type": "size_t", + "comment": "length of the input string" + } + ], + "argline": "git_oid *out, const char *str, size_t length", + "sig": "git_oid *::const char *::size_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Parse N characters of a hex formatted object id into a git_oid

\n", + "comments": "

If N is odd, N-1 characters will be parsed instead. The remaining space in the git_oid will be set to zero.

\n", + "group": "oid" + }, + "git_oid_fromraw": { + "type": "function", + "file": "oid.h", + "line": 78, + "lineto": 78, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "oid structure the result is written into." + }, + { + "name": "raw", + "type": "const unsigned char *", + "comment": "the raw input bytes to be copied." + } + ], + "argline": "git_oid *out, const unsigned char *raw", + "sig": "git_oid *::const unsigned char *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Copy an already raw oid into a git_oid structure.

\n", + "comments": "", + "group": "oid" + }, + "git_oid_fmt": { + "type": "function", + "file": "oid.h", + "line": 90, + "lineto": 90, + "args": [ + { + "name": "out", + "type": "char *", + "comment": "output hex string; must be pointing at the start of\n\t\tthe hex sequence and have at least the number of bytes\n\t\tneeded for an oid encoded in hex (40 bytes). Only the\n\t\toid digits are written; a '\n\\\n0' terminator must be added\n\t\tby the caller if it is required." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "oid structure to format." + } + ], + "argline": "char *out, const git_oid *id", + "sig": "char *::const git_oid *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Format a git_oid into a hex string.

\n", + "comments": "", + "group": "oid", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_oid_fmt-48", + "ex/HEAD/general.html#git_oid_fmt-49", + "ex/HEAD/general.html#git_oid_fmt-50", + "ex/HEAD/general.html#git_oid_fmt-51", + "ex/HEAD/general.html#git_oid_fmt-52" + ], + "network/fetch.c": [ + "ex/HEAD/network/fetch.html#git_oid_fmt-1", + "ex/HEAD/network/fetch.html#git_oid_fmt-2" + ], + "network/index-pack.c": [ + "ex/HEAD/network/index-pack.html#git_oid_fmt-6" + ], + "network/ls-remote.c": [ + "ex/HEAD/network/ls-remote.html#git_oid_fmt-1" + ] + } + }, + "git_oid_nfmt": { + "type": "function", + "file": "oid.h", + "line": 101, + "lineto": 101, + "args": [ + { + "name": "out", + "type": "char *", + "comment": "output hex string; you say how many bytes to write.\n\t\tIf the number of bytes is > GIT_OID_HEXSZ, extra bytes\n\t\twill be zeroed; if not, a '\n\\\n0' terminator is NOT added." + }, + { + "name": "n", + "type": "size_t", + "comment": "number of characters to write into out string" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "oid structure to format." + } + ], + "argline": "char *out, size_t n, const git_oid *id", + "sig": "char *::size_t::const git_oid *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Format a git_oid into a partial hex string.

\n", + "comments": "", + "group": "oid" + }, + "git_oid_pathfmt": { + "type": "function", + "file": "oid.h", + "line": 116, + "lineto": 116, + "args": [ + { + "name": "out", + "type": "char *", + "comment": "output hex string; must be pointing at the start of\n\t\tthe hex sequence and have at least the number of bytes\n\t\tneeded for an oid encoded in hex (41 bytes). Only the\n\t\toid digits are written; a '\n\\\n0' terminator must be added\n\t\tby the caller if it is required." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "oid structure to format." + } + ], + "argline": "char *out, const git_oid *id", + "sig": "char *::const git_oid *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Format a git_oid into a loose-object path string.

\n", + "comments": "

The resulting string is "aa/...", where "aa" is the first two hex digits of the oid and "..." is the remaining 38 digits.

\n", + "group": "oid" + }, + "git_oid_tostr_s": { + "type": "function", + "file": "oid.h", + "line": 129, + "lineto": 129, + "args": [ + { + "name": "oid", + "type": "const git_oid *", + "comment": "The oid structure to format" + } + ], + "argline": "const git_oid *oid", + "sig": "const git_oid *", + "return": { + "type": "char *", + "comment": " the c-string" + }, + "description": "

Format a git_oid into a statically allocated c-string.

\n", + "comments": "

The c-string is owned by the library and should not be freed by the user. If libgit2 is built with thread support, the string will be stored in TLS (i.e. one buffer per thread) to allow for concurrent calls of the function.

\n", + "group": "oid" + }, + "git_oid_tostr": { + "type": "function", + "file": "oid.h", + "line": 148, + "lineto": 148, + "args": [ + { + "name": "out", + "type": "char *", + "comment": "the buffer into which the oid string is output." + }, + { + "name": "n", + "type": "size_t", + "comment": "the size of the out buffer." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "the oid structure to format." + } + ], + "argline": "char *out, size_t n, const git_oid *id", + "sig": "char *::size_t::const git_oid *", + "return": { + "type": "char *", + "comment": " the out buffer pointer, assuming no input parameter\n\t\t\terrors, otherwise a pointer to an empty string." + }, + "description": "

Format a git_oid into a buffer as a hex format c-string.

\n", + "comments": "

If the buffer is smaller than GIT_OID_HEXSZ+1, then the resulting oid c-string will be truncated to n-1 characters (but will still be NUL-byte terminated).

\n\n

If there are any input parameter errors (out == NULL, n == 0, oid == NULL), then a pointer to an empty string is returned, so that the return value can always be printed.

\n", + "group": "oid", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_oid_tostr-18", + "ex/HEAD/blame.html#git_oid_tostr-19" + ], + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_oid_tostr-26", + "ex/HEAD/cat-file.html#git_oid_tostr-27", + "ex/HEAD/cat-file.html#git_oid_tostr-28", + "ex/HEAD/cat-file.html#git_oid_tostr-29", + "ex/HEAD/cat-file.html#git_oid_tostr-30" + ], + "log.c": [ + "ex/HEAD/log.html#git_oid_tostr-39", + "ex/HEAD/log.html#git_oid_tostr-40" + ], + "rev-parse.c": [ + "ex/HEAD/rev-parse.html#git_oid_tostr-12", + "ex/HEAD/rev-parse.html#git_oid_tostr-13", + "ex/HEAD/rev-parse.html#git_oid_tostr-14", + "ex/HEAD/rev-parse.html#git_oid_tostr-15" + ] + } + }, + "git_oid_cpy": { + "type": "function", + "file": "oid.h", + "line": 156, + "lineto": 156, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "oid structure the result is written into." + }, + { + "name": "src", + "type": "const git_oid *", + "comment": "oid structure to copy from." + } + ], + "argline": "git_oid *out, const git_oid *src", + "sig": "git_oid *::const git_oid *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Copy an oid from one structure to another.

\n", + "comments": "", + "group": "oid", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_oid_cpy-20", + "ex/HEAD/blame.html#git_oid_cpy-21", + "ex/HEAD/blame.html#git_oid_cpy-22" + ] + } + }, + "git_oid_cmp": { + "type": "function", + "file": "oid.h", + "line": 165, + "lineto": 165, + "args": [ + { + "name": "a", + "type": "const git_oid *", + "comment": "first oid structure." + }, + { + "name": "b", + "type": "const git_oid *", + "comment": "second oid structure." + } + ], + "argline": "const git_oid *a, const git_oid *b", + "sig": "const git_oid *::const git_oid *", + "return": { + "type": "int", + "comment": " \n<\n0, 0, >0 if a \n<\n b, a == b, a > b." + }, + "description": "

Compare two oid structures.

\n", + "comments": "", + "group": "oid" + }, + "git_oid_equal": { + "type": "function", + "file": "oid.h", + "line": 174, + "lineto": 174, + "args": [ + { + "name": "a", + "type": "const git_oid *", + "comment": "first oid structure." + }, + { + "name": "b", + "type": "const git_oid *", + "comment": "second oid structure." + } + ], + "argline": "const git_oid *a, const git_oid *b", + "sig": "const git_oid *::const git_oid *", + "return": { + "type": "int", + "comment": " true if equal, false otherwise" + }, + "description": "

Compare two oid structures for equality

\n", + "comments": "", + "group": "oid" + }, + "git_oid_ncmp": { + "type": "function", + "file": "oid.h", + "line": 185, + "lineto": 185, + "args": [ + { + "name": "a", + "type": "const git_oid *", + "comment": "first oid structure." + }, + { + "name": "b", + "type": "const git_oid *", + "comment": "second oid structure." + }, + { + "name": "len", + "type": "size_t", + "comment": "the number of hex chars to compare" + } + ], + "argline": "const git_oid *a, const git_oid *b, size_t len", + "sig": "const git_oid *::const git_oid *::size_t", + "return": { + "type": "int", + "comment": " 0 in case of a match" + }, + "description": "

Compare the first 'len' hexadecimal characters (packets of 4 bits)\n of two oid structures.

\n", + "comments": "", + "group": "oid" + }, + "git_oid_streq": { + "type": "function", + "file": "oid.h", + "line": 194, + "lineto": 194, + "args": [ + { + "name": "id", + "type": "const git_oid *", + "comment": "oid structure." + }, + { + "name": "str", + "type": "const char *", + "comment": "input hex string of an object id." + } + ], + "argline": "const git_oid *id, const char *str", + "sig": "const git_oid *::const char *", + "return": { + "type": "int", + "comment": " 0 in case of a match, -1 otherwise." + }, + "description": "

Check if an oid equals an hex formatted object id.

\n", + "comments": "", + "group": "oid" + }, + "git_oid_strcmp": { + "type": "function", + "file": "oid.h", + "line": 204, + "lineto": 204, + "args": [ + { + "name": "id", + "type": "const git_oid *", + "comment": "oid structure." + }, + { + "name": "str", + "type": "const char *", + "comment": "input hex string of an object id." + } + ], + "argline": "const git_oid *id, const char *str", + "sig": "const git_oid *::const char *", + "return": { + "type": "int", + "comment": " -1 if str is not valid, \n<\n0 if id sorts before str,\n 0 if id matches str, >0 if id sorts after str." + }, + "description": "

Compare an oid to an hex formatted object id.

\n", + "comments": "", + "group": "oid" + }, + "git_oid_iszero": { + "type": "function", + "file": "oid.h", + "line": 211, + "lineto": 211, + "args": [ + { + "name": "id", + "type": "const git_oid *", + "comment": null + } + ], + "argline": "const git_oid *id", + "sig": "const git_oid *", + "return": { + "type": "int", + "comment": " 1 if all zeros, 0 otherwise." + }, + "description": "

Check is an oid is all zeros.

\n", + "comments": "", + "group": "oid", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_oid_iszero-23" + ], + "network/fetch.c": [ + "ex/HEAD/network/fetch.html#git_oid_iszero-3" + ] + } + }, + "git_oid_shorten_new": { + "type": "function", + "file": "oid.h", + "line": 232, + "lineto": 232, + "args": [ + { + "name": "min_length", + "type": "size_t", + "comment": "The minimal length for all identifiers,\n\t\twhich will be used even if shorter OIDs would still\n\t\tbe unique." + } + ], + "argline": "size_t min_length", + "sig": "size_t", + "return": { + "type": "git_oid_shorten *", + "comment": " a `git_oid_shorten` instance, NULL if OOM" + }, + "description": "

Create a new OID shortener.

\n", + "comments": "

The OID shortener is used to process a list of OIDs in text form and return the shortest length that would uniquely identify all of them.

\n\n

E.g. look at the result of git log --abbrev.

\n", + "group": "oid" + }, + "git_oid_shorten_add": { + "type": "function", + "file": "oid.h", + "line": 258, + "lineto": 258, + "args": [ + { + "name": "os", + "type": "git_oid_shorten *", + "comment": "a `git_oid_shorten` instance" + }, + { + "name": "text_id", + "type": "const char *", + "comment": "an OID in text form" + } + ], + "argline": "git_oid_shorten *os, const char *text_id", + "sig": "git_oid_shorten *::const char *", + "return": { + "type": "int", + "comment": " the minimal length to uniquely identify all OIDs\n\t\tadded so far to the set; or an error code (\n<\n0) if an\n\t\terror occurs." + }, + "description": "

Add a new OID to set of shortened OIDs and calculate\n the minimal length to uniquely identify all the OIDs in\n the set.

\n", + "comments": "

The OID is expected to be a 40-char hexadecimal string. The OID is owned by the user and will not be modified or freed.

\n\n

For performance reasons, there is a hard-limit of how many OIDs can be added to a single set (around ~32000, assuming a mostly randomized distribution), which should be enough for any kind of program, and keeps the algorithm fast and memory-efficient.

\n\n

Attempting to add more than those OIDs will result in a GITERR_INVALID error

\n", + "group": "oid" + }, + "git_oid_shorten_free": { + "type": "function", + "file": "oid.h", + "line": 265, + "lineto": 265, + "args": [ + { + "name": "os", + "type": "git_oid_shorten *", + "comment": "a `git_oid_shorten` instance" + } + ], + "argline": "git_oid_shorten *os", + "sig": "git_oid_shorten *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free an OID shortener instance

\n", + "comments": "", + "group": "oid" + }, + "git_oidarray_free": { + "type": "function", + "file": "oidarray.h", + "line": 34, + "lineto": 34, + "args": [ + { + "name": "array", + "type": "git_oidarray *", + "comment": "git_oidarray from which to free oid data" + } + ], + "argline": "git_oidarray *array", + "sig": "git_oidarray *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free the OID array

\n", + "comments": "

This method must (and must only) be called on git_oidarray objects where the array is allocated by the library. Not doing so, will result in a memory leak.

\n\n

This does not free the git_oidarray itself, since the library will never allocate that object directly itself (it is more commonly embedded inside another struct or created on the stack).

\n", + "group": "oidarray" + }, + "git_packbuilder_new": { + "type": "function", + "file": "pack.h", + "line": 64, + "lineto": 64, + "args": [ + { + "name": "out", + "type": "git_packbuilder **", + "comment": "The new packbuilder object" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository" + } + ], + "argline": "git_packbuilder **out, git_repository *repo", + "sig": "git_packbuilder **::git_repository *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Initialize a new packbuilder

\n", + "comments": "", + "group": "packbuilder" + }, + "git_packbuilder_set_threads": { + "type": "function", + "file": "pack.h", + "line": 77, + "lineto": 77, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "The packbuilder" + }, + { + "name": "n", + "type": "unsigned int", + "comment": "Number of threads to spawn" + } + ], + "argline": "git_packbuilder *pb, unsigned int n", + "sig": "git_packbuilder *::unsigned int", + "return": { + "type": "unsigned int", + "comment": " number of actual threads to be used" + }, + "description": "

Set number of threads to spawn

\n", + "comments": "

By default, libgit2 won't spawn any threads at all; when set to 0, libgit2 will autodetect the number of CPUs.

\n", + "group": "packbuilder" + }, + "git_packbuilder_insert": { + "type": "function", + "file": "pack.h", + "line": 91, + "lineto": 91, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "The packbuilder" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "The oid of the commit" + }, + { + "name": "name", + "type": "const char *", + "comment": "The name; might be NULL" + } + ], + "argline": "git_packbuilder *pb, const git_oid *id, const char *name", + "sig": "git_packbuilder *::const git_oid *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Insert a single object

\n", + "comments": "

For an optimal pack it's mandatory to insert objects in recency order, commits followed by trees and blobs.

\n", + "group": "packbuilder" + }, + "git_packbuilder_insert_tree": { + "type": "function", + "file": "pack.h", + "line": 103, + "lineto": 103, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "The packbuilder" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "The oid of the root tree" + } + ], + "argline": "git_packbuilder *pb, const git_oid *id", + "sig": "git_packbuilder *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Insert a root tree object

\n", + "comments": "

This will add the tree as well as all referenced trees and blobs.

\n", + "group": "packbuilder" + }, + "git_packbuilder_insert_commit": { + "type": "function", + "file": "pack.h", + "line": 115, + "lineto": 115, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "The packbuilder" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "The oid of the commit" + } + ], + "argline": "git_packbuilder *pb, const git_oid *id", + "sig": "git_packbuilder *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Insert a commit object

\n", + "comments": "

This will add a commit as well as the completed referenced tree.

\n", + "group": "packbuilder" + }, + "git_packbuilder_insert_walk": { + "type": "function", + "file": "pack.h", + "line": 128, + "lineto": 128, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "the packbuilder" + }, + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the revwalk to use to fill the packbuilder" + } + ], + "argline": "git_packbuilder *pb, git_revwalk *walk", + "sig": "git_packbuilder *::git_revwalk *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Insert objects as given by the walk

\n", + "comments": "

Those commits and all objects they reference will be inserted into the packbuilder.

\n", + "group": "packbuilder" + }, + "git_packbuilder_insert_recur": { + "type": "function", + "file": "pack.h", + "line": 140, + "lineto": 140, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "the packbuilder" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "the id of the root object to insert" + }, + { + "name": "name", + "type": "const char *", + "comment": "optional name for the object" + } + ], + "argline": "git_packbuilder *pb, const git_oid *id, const char *name", + "sig": "git_packbuilder *::const git_oid *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Recursively insert an object and its referenced objects

\n", + "comments": "

Insert the object as well as any object it references.

\n", + "group": "packbuilder" + }, + "git_packbuilder_write": { + "type": "function", + "file": "pack.h", + "line": 164, + "lineto": 169, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "The packbuilder" + }, + { + "name": "path", + "type": "const char *", + "comment": "to the directory where the packfile and index should be stored" + }, + { + "name": "mode", + "type": "unsigned int", + "comment": "permissions to use creating a packfile or 0 for defaults" + }, + { + "name": "progress_cb", + "type": "git_transfer_progress_cb", + "comment": "function to call with progress information from the indexer (optional)" + }, + { + "name": "progress_cb_payload", + "type": "void *", + "comment": "payload for the progress callback (optional)" + } + ], + "argline": "git_packbuilder *pb, const char *path, unsigned int mode, git_transfer_progress_cb progress_cb, void *progress_cb_payload", + "sig": "git_packbuilder *::const char *::unsigned int::git_transfer_progress_cb::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Write the new pack and corresponding index file to path.

\n", + "comments": "", + "group": "packbuilder" + }, + "git_packbuilder_hash": { + "type": "function", + "file": "pack.h", + "line": 179, + "lineto": 179, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "The packbuilder object" + } + ], + "argline": "git_packbuilder *pb", + "sig": "git_packbuilder *", + "return": { + "type": "const git_oid *", + "comment": null + }, + "description": "

Get the packfile's hash

\n", + "comments": "

A packfile's name is derived from the sorted hashing of all object names. This is only correct after the packfile has been written.

\n", + "group": "packbuilder" + }, + "git_packbuilder_foreach": { + "type": "function", + "file": "pack.h", + "line": 191, + "lineto": 191, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "the packbuilder" + }, + { + "name": "cb", + "type": "git_packbuilder_foreach_cb", + "comment": "the callback to call with each packed object's buffer" + }, + { + "name": "payload", + "type": "void *", + "comment": "the callback's data" + } + ], + "argline": "git_packbuilder *pb, git_packbuilder_foreach_cb cb, void *payload", + "sig": "git_packbuilder *::git_packbuilder_foreach_cb::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create the new pack and pass each object to the callback

\n", + "comments": "", + "group": "packbuilder" + }, + "git_packbuilder_object_count": { + "type": "function", + "file": "pack.h", + "line": 199, + "lineto": 199, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "the packbuilder" + } + ], + "argline": "git_packbuilder *pb", + "sig": "git_packbuilder *", + "return": { + "type": "uint32_t", + "comment": " the number of objects in the packfile" + }, + "description": "

Get the total number of objects the packbuilder will write out

\n", + "comments": "", + "group": "packbuilder" + }, + "git_packbuilder_written": { + "type": "function", + "file": "pack.h", + "line": 207, + "lineto": 207, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "the packbuilder" + } + ], + "argline": "git_packbuilder *pb", + "sig": "git_packbuilder *", + "return": { + "type": "uint32_t", + "comment": " the number of objects which have already been written" + }, + "description": "

Get the number of objects the packbuilder has already written out

\n", + "comments": "", + "group": "packbuilder" + }, + "git_packbuilder_set_callbacks": { + "type": "function", + "file": "pack.h", + "line": 226, + "lineto": 229, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "The packbuilder object" + }, + { + "name": "progress_cb", + "type": "git_packbuilder_progress", + "comment": "Function to call with progress information during\n pack building. Be aware that this is called inline with pack building\n operations, so performance may be affected." + }, + { + "name": "progress_cb_payload", + "type": "void *", + "comment": "Payload for progress callback." + } + ], + "argline": "git_packbuilder *pb, git_packbuilder_progress progress_cb, void *progress_cb_payload", + "sig": "git_packbuilder *::git_packbuilder_progress::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Set the callbacks for a packbuilder

\n", + "comments": "", + "group": "packbuilder" + }, + "git_packbuilder_free": { + "type": "function", + "file": "pack.h", + "line": 236, + "lineto": 236, + "args": [ + { + "name": "pb", + "type": "git_packbuilder *", + "comment": "The packbuilder" + } + ], + "argline": "git_packbuilder *pb", + "sig": "git_packbuilder *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free the packbuilder and all associated data

\n", + "comments": "", + "group": "packbuilder" + }, + "git_patch_from_diff": { + "type": "function", + "file": "patch.h", + "line": 51, + "lineto": 52, + "args": [ + { + "name": "out", + "type": "git_patch **", + "comment": "Output parameter for the delta patch object" + }, + { + "name": "diff", + "type": "git_diff *", + "comment": "Diff list object" + }, + { + "name": "idx", + "type": "size_t", + "comment": "Index into diff list" + } + ], + "argline": "git_patch **out, git_diff *diff, size_t idx", + "sig": "git_patch **::git_diff *::size_t", + "return": { + "type": "int", + "comment": " 0 on success, other value \n<\n 0 on error" + }, + "description": "

Return a patch for an entry in the diff list.

\n", + "comments": "

The git_patch is a newly created object contains the text diffs for the delta. You have to call git_patch_free() when you are done with it. You can use the patch object to loop over all the hunks and lines in the diff of the one delta.

\n\n

For an unchanged file or a binary file, no git_patch will be created, the output will be set to NULL, and the binary flag will be set true in the git_diff_delta structure.

\n\n

It is okay to pass NULL for either of the output parameters; if you pass NULL for the git_patch, then the text diff will not be calculated.

\n", + "group": "patch" + }, + "git_patch_from_blobs": { + "type": "function", + "file": "patch.h", + "line": 70, + "lineto": 76, + "args": [ + { + "name": "out", + "type": "git_patch **", + "comment": "The generated patch; NULL on error" + }, + { + "name": "old_blob", + "type": "const git_blob *", + "comment": "Blob for old side of diff, or NULL for empty blob" + }, + { + "name": "old_as_path", + "type": "const char *", + "comment": "Treat old blob as if it had this filename; can be NULL" + }, + { + "name": "new_blob", + "type": "const git_blob *", + "comment": "Blob for new side of diff, or NULL for empty blob" + }, + { + "name": "new_as_path", + "type": "const char *", + "comment": "Treat new blob as if it had this filename; can be NULL" + }, + { + "name": "opts", + "type": "const git_diff_options *", + "comment": "Options for diff, or NULL for default options" + } + ], + "argline": "git_patch **out, const git_blob *old_blob, const char *old_as_path, const git_blob *new_blob, const char *new_as_path, const git_diff_options *opts", + "sig": "git_patch **::const git_blob *::const char *::const git_blob *::const char *::const git_diff_options *", + "return": { + "type": "int", + "comment": " 0 on success or error code \n<\n 0" + }, + "description": "

Directly generate a patch from the difference between two blobs.

\n", + "comments": "

This is just like git_diff_blobs() except it generates a patch object for the difference instead of directly making callbacks. You can use the standard git_patch accessor functions to read the patch data, and you must call git_patch_free() on the patch when done.

\n", + "group": "patch" + }, + "git_patch_from_blob_and_buffer": { + "type": "function", + "file": "patch.h", + "line": 95, + "lineto": 102, + "args": [ + { + "name": "out", + "type": "git_patch **", + "comment": "The generated patch; NULL on error" + }, + { + "name": "old_blob", + "type": "const git_blob *", + "comment": "Blob for old side of diff, or NULL for empty blob" + }, + { + "name": "old_as_path", + "type": "const char *", + "comment": "Treat old blob as if it had this filename; can be NULL" + }, + { + "name": "buffer", + "type": "const char *", + "comment": "Raw data for new side of diff, or NULL for empty" + }, + { + "name": "buffer_len", + "type": "size_t", + "comment": "Length of raw data for new side of diff" + }, + { + "name": "buffer_as_path", + "type": "const char *", + "comment": "Treat buffer as if it had this filename; can be NULL" + }, + { + "name": "opts", + "type": "const git_diff_options *", + "comment": "Options for diff, or NULL for default options" + } + ], + "argline": "git_patch **out, const git_blob *old_blob, const char *old_as_path, const char *buffer, size_t buffer_len, const char *buffer_as_path, const git_diff_options *opts", + "sig": "git_patch **::const git_blob *::const char *::const char *::size_t::const char *::const git_diff_options *", + "return": { + "type": "int", + "comment": " 0 on success or error code \n<\n 0" + }, + "description": "

Directly generate a patch from the difference between a blob and a buffer.

\n", + "comments": "

This is just like git_diff_blob_to_buffer() except it generates a patch object for the difference instead of directly making callbacks. You can use the standard git_patch accessor functions to read the patch data, and you must call git_patch_free() on the patch when done.

\n", + "group": "patch" + }, + "git_patch_from_buffers": { + "type": "function", + "file": "patch.h", + "line": 122, + "lineto": 130, + "args": [ + { + "name": "out", + "type": "git_patch **", + "comment": "The generated patch; NULL on error" + }, + { + "name": "old_buffer", + "type": "const void *", + "comment": "Raw data for old side of diff, or NULL for empty" + }, + { + "name": "old_len", + "type": "size_t", + "comment": "Length of the raw data for old side of the diff" + }, + { + "name": "old_as_path", + "type": "const char *", + "comment": "Treat old buffer as if it had this filename; can be NULL" + }, + { + "name": "new_buffer", + "type": "const char *", + "comment": "Raw data for new side of diff, or NULL for empty" + }, + { + "name": "new_len", + "type": "size_t", + "comment": "Length of raw data for new side of diff" + }, + { + "name": "new_as_path", + "type": "const char *", + "comment": "Treat buffer as if it had this filename; can be NULL" + }, + { + "name": "opts", + "type": "const git_diff_options *", + "comment": "Options for diff, or NULL for default options" + } + ], + "argline": "git_patch **out, const void *old_buffer, size_t old_len, const char *old_as_path, const char *new_buffer, size_t new_len, const char *new_as_path, const git_diff_options *opts", + "sig": "git_patch **::const void *::size_t::const char *::const char *::size_t::const char *::const git_diff_options *", + "return": { + "type": "int", + "comment": " 0 on success or error code \n<\n 0" + }, + "description": "

Directly generate a patch from the difference between two buffers.

\n", + "comments": "

This is just like git_diff_buffers() except it generates a patch object for the difference instead of directly making callbacks. You can use the standard git_patch accessor functions to read the patch data, and you must call git_patch_free() on the patch when done.

\n", + "group": "patch" + }, + "git_patch_free": { + "type": "function", + "file": "patch.h", + "line": 135, + "lineto": 135, + "args": [ + { + "name": "patch", + "type": "git_patch *", + "comment": null + } + ], + "argline": "git_patch *patch", + "sig": "git_patch *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a git_patch object.

\n", + "comments": "", + "group": "patch" + }, + "git_patch_get_delta": { + "type": "function", + "file": "patch.h", + "line": 141, + "lineto": 141, + "args": [ + { + "name": "patch", + "type": "const git_patch *", + "comment": null + } + ], + "argline": "const git_patch *patch", + "sig": "const git_patch *", + "return": { + "type": "const git_diff_delta *", + "comment": null + }, + "description": "

Get the delta associated with a patch. This delta points to internal\n data and you do not have to release it when you are done with it.

\n", + "comments": "", + "group": "patch" + }, + "git_patch_num_hunks": { + "type": "function", + "file": "patch.h", + "line": 146, + "lineto": 146, + "args": [ + { + "name": "patch", + "type": "const git_patch *", + "comment": null + } + ], + "argline": "const git_patch *patch", + "sig": "const git_patch *", + "return": { + "type": "size_t", + "comment": null + }, + "description": "

Get the number of hunks in a patch

\n", + "comments": "", + "group": "patch" + }, + "git_patch_line_stats": { + "type": "function", + "file": "patch.h", + "line": 164, + "lineto": 168, + "args": [ + { + "name": "total_context", + "type": "size_t *", + "comment": "Count of context lines in output, can be NULL." + }, + { + "name": "total_additions", + "type": "size_t *", + "comment": "Count of addition lines in output, can be NULL." + }, + { + "name": "total_deletions", + "type": "size_t *", + "comment": "Count of deletion lines in output, can be NULL." + }, + { + "name": "patch", + "type": "const git_patch *", + "comment": "The git_patch object" + } + ], + "argline": "size_t *total_context, size_t *total_additions, size_t *total_deletions, const git_patch *patch", + "sig": "size_t *::size_t *::size_t *::const git_patch *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on error" + }, + "description": "

Get line counts of each type in a patch.

\n", + "comments": "

This helps imitate a diff --numstat type of output. For that purpose, you only need the total_additions and total_deletions values, but we include the total_context line count in case you want the total number of lines of diff output that will be generated.

\n\n

All outputs are optional. Pass NULL if you don't need a particular count.

\n", + "group": "patch" + }, + "git_patch_get_hunk": { + "type": "function", + "file": "patch.h", + "line": 183, + "lineto": 187, + "args": [ + { + "name": "out", + "type": "const git_diff_hunk **", + "comment": "Output pointer to git_diff_hunk of hunk" + }, + { + "name": "lines_in_hunk", + "type": "size_t *", + "comment": "Output count of total lines in this hunk" + }, + { + "name": "patch", + "type": "git_patch *", + "comment": "Input pointer to patch object" + }, + { + "name": "hunk_idx", + "type": "size_t", + "comment": "Input index of hunk to get information about" + } + ], + "argline": "const git_diff_hunk **out, size_t *lines_in_hunk, git_patch *patch, size_t hunk_idx", + "sig": "const git_diff_hunk **::size_t *::git_patch *::size_t", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if hunk_idx out of range, \n<\n0 on error" + }, + "description": "

Get the information about a hunk in a patch

\n", + "comments": "

Given a patch and a hunk index into the patch, this returns detailed information about that hunk. Any of the output pointers can be passed as NULL if you don't care about that particular piece of information.

\n", + "group": "patch" + }, + "git_patch_num_lines_in_hunk": { + "type": "function", + "file": "patch.h", + "line": 196, + "lineto": 198, + "args": [ + { + "name": "patch", + "type": "const git_patch *", + "comment": "The git_patch object" + }, + { + "name": "hunk_idx", + "type": "size_t", + "comment": "Index of the hunk" + } + ], + "argline": "const git_patch *patch, size_t hunk_idx", + "sig": "const git_patch *::size_t", + "return": { + "type": "int", + "comment": " Number of lines in hunk or -1 if invalid hunk index" + }, + "description": "

Get the number of lines in a hunk.

\n", + "comments": "", + "group": "patch" + }, + "git_patch_get_line_in_hunk": { + "type": "function", + "file": "patch.h", + "line": 214, + "lineto": 218, + "args": [ + { + "name": "out", + "type": "const git_diff_line **", + "comment": "The git_diff_line data for this line" + }, + { + "name": "patch", + "type": "git_patch *", + "comment": "The patch to look in" + }, + { + "name": "hunk_idx", + "type": "size_t", + "comment": "The index of the hunk" + }, + { + "name": "line_of_hunk", + "type": "size_t", + "comment": "The index of the line in the hunk" + } + ], + "argline": "const git_diff_line **out, git_patch *patch, size_t hunk_idx, size_t line_of_hunk", + "sig": "const git_diff_line **::git_patch *::size_t::size_t", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on failure" + }, + "description": "

Get data about a line in a hunk of a patch.

\n", + "comments": "

Given a patch, a hunk index, and a line index in the hunk, this will return a lot of details about that line. If you pass a hunk index larger than the number of hunks or a line index larger than the number of lines in the hunk, this will return -1.

\n", + "group": "patch" + }, + "git_patch_size": { + "type": "function", + "file": "patch.h", + "line": 236, + "lineto": 240, + "args": [ + { + "name": "patch", + "type": "git_patch *", + "comment": "A git_patch representing changes to one file" + }, + { + "name": "include_context", + "type": "int", + "comment": "Include context lines in size if non-zero" + }, + { + "name": "include_hunk_headers", + "type": "int", + "comment": "Include hunk header lines if non-zero" + }, + { + "name": "include_file_headers", + "type": "int", + "comment": "Include file header lines if non-zero" + } + ], + "argline": "git_patch *patch, int include_context, int include_hunk_headers, int include_file_headers", + "sig": "git_patch *::int::int::int", + "return": { + "type": "size_t", + "comment": " The number of bytes of data" + }, + "description": "

Look up size of patch diff data in bytes

\n", + "comments": "

This returns the raw size of the patch data. This only includes the actual data from the lines of the diff, not the file or hunk headers.

\n\n

If you pass include_context as true (non-zero), this will be the size of all of the diff output; if you pass it as false (zero), this will only include the actual changed lines (as if context_lines was 0).

\n", + "group": "patch" + }, + "git_patch_print": { + "type": "function", + "file": "patch.h", + "line": 254, + "lineto": 257, + "args": [ + { + "name": "patch", + "type": "git_patch *", + "comment": "A git_patch representing changes to one file" + }, + { + "name": "print_cb", + "type": "git_diff_line_cb", + "comment": "Callback function to output lines of the patch. Will be\n called for file headers, hunk headers, and diff lines." + }, + { + "name": "payload", + "type": "void *", + "comment": "Reference pointer that will be passed to your callbacks." + } + ], + "argline": "git_patch *patch, git_diff_line_cb print_cb, void *payload", + "sig": "git_patch *::git_diff_line_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Serialize the patch to text via callback.

\n", + "comments": "

Returning a non-zero value from the callback will terminate the iteration and return that value to the caller.

\n", + "group": "patch" + }, + "git_patch_to_buf": { + "type": "function", + "file": "patch.h", + "line": 266, + "lineto": 268, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "The git_buf to be filled in" + }, + { + "name": "patch", + "type": "git_patch *", + "comment": "A git_patch representing changes to one file" + } + ], + "argline": "git_buf *out, git_patch *patch", + "sig": "git_buf *::git_patch *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on failure." + }, + "description": "

Get the content of a patch as a single diff text.

\n", + "comments": "", + "group": "patch" + }, + "git_pathspec_new": { + "type": "function", + "file": "pathspec.h", + "line": 65, + "lineto": 66, + "args": [ + { + "name": "out", + "type": "git_pathspec **", + "comment": "Output of the compiled pathspec" + }, + { + "name": "pathspec", + "type": "const git_strarray *", + "comment": "A git_strarray of the paths to match" + } + ], + "argline": "git_pathspec **out, const git_strarray *pathspec", + "sig": "git_pathspec **::const git_strarray *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on failure" + }, + "description": "

Compile a pathspec

\n", + "comments": "", + "group": "pathspec", + "examples": { + "log.c": [ + "ex/HEAD/log.html#git_pathspec_new-41" + ] + } + }, + "git_pathspec_free": { + "type": "function", + "file": "pathspec.h", + "line": 73, + "lineto": 73, + "args": [ + { + "name": "ps", + "type": "git_pathspec *", + "comment": "The compiled pathspec" + } + ], + "argline": "git_pathspec *ps", + "sig": "git_pathspec *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a pathspec

\n", + "comments": "", + "group": "pathspec", + "examples": { + "log.c": [ + "ex/HEAD/log.html#git_pathspec_free-42" + ] + } + }, + "git_pathspec_matches_path": { + "type": "function", + "file": "pathspec.h", + "line": 88, + "lineto": 89, + "args": [ + { + "name": "ps", + "type": "const git_pathspec *", + "comment": "The compiled pathspec" + }, + { + "name": "flags", + "type": "uint32_t", + "comment": "Combination of git_pathspec_flag_t options to control match" + }, + { + "name": "path", + "type": "const char *", + "comment": "The pathname to attempt to match" + } + ], + "argline": "const git_pathspec *ps, uint32_t flags, const char *path", + "sig": "const git_pathspec *::uint32_t::const char *", + "return": { + "type": "int", + "comment": " 1 is path matches spec, 0 if it does not" + }, + "description": "

Try to match a path against a pathspec

\n", + "comments": "

Unlike most of the other pathspec matching functions, this will not fall back on the native case-sensitivity for your platform. You must explicitly pass flags to control case sensitivity or else this will fall back on being case sensitive.

\n", + "group": "pathspec" + }, + "git_pathspec_match_workdir": { + "type": "function", + "file": "pathspec.h", + "line": 113, + "lineto": 117, + "args": [ + { + "name": "out", + "type": "git_pathspec_match_list **", + "comment": "Output list of matches; pass NULL to just get return value" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository in which to match; bare repo is an error" + }, + { + "name": "flags", + "type": "uint32_t", + "comment": "Combination of git_pathspec_flag_t options to control match" + }, + { + "name": "ps", + "type": "git_pathspec *", + "comment": "Pathspec to be matched" + } + ], + "argline": "git_pathspec_match_list **out, git_repository *repo, uint32_t flags, git_pathspec *ps", + "sig": "git_pathspec_match_list **::git_repository *::uint32_t::git_pathspec *", + "return": { + "type": "int", + "comment": " 0 on success, -1 on error, GIT_ENOTFOUND if no matches and\n the GIT_PATHSPEC_NO_MATCH_ERROR flag was given" + }, + "description": "

Match a pathspec against the working directory of a repository.

\n", + "comments": "

This matches the pathspec against the current files in the working directory of the repository. It is an error to invoke this on a bare repo. This handles git ignores (i.e. ignored files will not be considered to match the pathspec unless the file is tracked in the index).

\n\n

If out is not NULL, this returns a git_patchspec_match_list. That contains the list of all matched filenames (unless you pass the GIT_PATHSPEC_FAILURES_ONLY flag) and may also contain the list of pathspecs with no match (if you used the GIT_PATHSPEC_FIND_FAILURES flag). You must call git_pathspec_match_list_free() on this object.

\n", + "group": "pathspec" + }, + "git_pathspec_match_index": { + "type": "function", + "file": "pathspec.h", + "line": 142, + "lineto": 146, + "args": [ + { + "name": "out", + "type": "git_pathspec_match_list **", + "comment": "Output list of matches; pass NULL to just get return value" + }, + { + "name": "index", + "type": "git_index *", + "comment": "The index to match against" + }, + { + "name": "flags", + "type": "uint32_t", + "comment": "Combination of git_pathspec_flag_t options to control match" + }, + { + "name": "ps", + "type": "git_pathspec *", + "comment": "Pathspec to be matched" + } + ], + "argline": "git_pathspec_match_list **out, git_index *index, uint32_t flags, git_pathspec *ps", + "sig": "git_pathspec_match_list **::git_index *::uint32_t::git_pathspec *", + "return": { + "type": "int", + "comment": " 0 on success, -1 on error, GIT_ENOTFOUND if no matches and\n the GIT_PATHSPEC_NO_MATCH_ERROR flag is used" + }, + "description": "

Match a pathspec against entries in an index.

\n", + "comments": "

This matches the pathspec against the files in the repository index.

\n\n

NOTE: At the moment, the case sensitivity of this match is controlled by the current case-sensitivity of the index object itself and the USE_CASE and IGNORE_CASE flags will have no effect. This behavior will be corrected in a future release.

\n\n

If out is not NULL, this returns a git_patchspec_match_list. That contains the list of all matched filenames (unless you pass the GIT_PATHSPEC_FAILURES_ONLY flag) and may also contain the list of pathspecs with no match (if you used the GIT_PATHSPEC_FIND_FAILURES flag). You must call git_pathspec_match_list_free() on this object.

\n", + "group": "pathspec" + }, + "git_pathspec_match_tree": { + "type": "function", + "file": "pathspec.h", + "line": 166, + "lineto": 170, + "args": [ + { + "name": "out", + "type": "git_pathspec_match_list **", + "comment": "Output list of matches; pass NULL to just get return value" + }, + { + "name": "tree", + "type": "git_tree *", + "comment": "The root-level tree to match against" + }, + { + "name": "flags", + "type": "uint32_t", + "comment": "Combination of git_pathspec_flag_t options to control match" + }, + { + "name": "ps", + "type": "git_pathspec *", + "comment": "Pathspec to be matched" + } + ], + "argline": "git_pathspec_match_list **out, git_tree *tree, uint32_t flags, git_pathspec *ps", + "sig": "git_pathspec_match_list **::git_tree *::uint32_t::git_pathspec *", + "return": { + "type": "int", + "comment": " 0 on success, -1 on error, GIT_ENOTFOUND if no matches and\n the GIT_PATHSPEC_NO_MATCH_ERROR flag is used" + }, + "description": "

Match a pathspec against files in a tree.

\n", + "comments": "

This matches the pathspec against the files in the given tree.

\n\n

If out is not NULL, this returns a git_pathspec_match_list. That contains the list of all matched filenames (unless you pass the GIT_PATHSPEC_FAILURES_ONLY flag) and may also contain the list of pathspecs with no match (if you used the GIT_PATHSPEC_FIND_FAILURES flag). You must call git_pathspec_match_list_free() on this object.

\n", + "group": "pathspec", + "examples": { + "log.c": [ + "ex/HEAD/log.html#git_pathspec_match_tree-43" + ] + } + }, + "git_pathspec_match_diff": { + "type": "function", + "file": "pathspec.h", + "line": 190, + "lineto": 194, + "args": [ + { + "name": "out", + "type": "git_pathspec_match_list **", + "comment": "Output list of matches; pass NULL to just get return value" + }, + { + "name": "diff", + "type": "git_diff *", + "comment": "A generated diff list" + }, + { + "name": "flags", + "type": "uint32_t", + "comment": "Combination of git_pathspec_flag_t options to control match" + }, + { + "name": "ps", + "type": "git_pathspec *", + "comment": "Pathspec to be matched" + } + ], + "argline": "git_pathspec_match_list **out, git_diff *diff, uint32_t flags, git_pathspec *ps", + "sig": "git_pathspec_match_list **::git_diff *::uint32_t::git_pathspec *", + "return": { + "type": "int", + "comment": " 0 on success, -1 on error, GIT_ENOTFOUND if no matches and\n the GIT_PATHSPEC_NO_MATCH_ERROR flag is used" + }, + "description": "

Match a pathspec against files in a diff list.

\n", + "comments": "

This matches the pathspec against the files in the given diff list.

\n\n

If out is not NULL, this returns a git_pathspec_match_list. That contains the list of all matched filenames (unless you pass the GIT_PATHSPEC_FAILURES_ONLY flag) and may also contain the list of pathspecs with no match (if you used the GIT_PATHSPEC_FIND_FAILURES flag). You must call git_pathspec_match_list_free() on this object.

\n", + "group": "pathspec" + }, + "git_pathspec_match_list_free": { + "type": "function", + "file": "pathspec.h", + "line": 201, + "lineto": 201, + "args": [ + { + "name": "m", + "type": "git_pathspec_match_list *", + "comment": "The git_pathspec_match_list to be freed" + } + ], + "argline": "git_pathspec_match_list *m", + "sig": "git_pathspec_match_list *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free memory associated with a git_pathspec_match_list

\n", + "comments": "", + "group": "pathspec" + }, + "git_pathspec_match_list_entrycount": { + "type": "function", + "file": "pathspec.h", + "line": 209, + "lineto": 210, + "args": [ + { + "name": "m", + "type": "const git_pathspec_match_list *", + "comment": "The git_pathspec_match_list object" + } + ], + "argline": "const git_pathspec_match_list *m", + "sig": "const git_pathspec_match_list *", + "return": { + "type": "size_t", + "comment": " Number of items in match list" + }, + "description": "

Get the number of items in a match list.

\n", + "comments": "", + "group": "pathspec" + }, + "git_pathspec_match_list_entry": { + "type": "function", + "file": "pathspec.h", + "line": 222, + "lineto": 223, + "args": [ + { + "name": "m", + "type": "const git_pathspec_match_list *", + "comment": "The git_pathspec_match_list object" + }, + { + "name": "pos", + "type": "size_t", + "comment": "The index into the list" + } + ], + "argline": "const git_pathspec_match_list *m, size_t pos", + "sig": "const git_pathspec_match_list *::size_t", + "return": { + "type": "const char *", + "comment": " The filename of the match" + }, + "description": "

Get a matching filename by position.

\n", + "comments": "

This routine cannot be used if the match list was generated by git_pathspec_match_diff. If so, it will always return NULL.

\n", + "group": "pathspec" + }, + "git_pathspec_match_list_diff_entry": { + "type": "function", + "file": "pathspec.h", + "line": 235, + "lineto": 236, + "args": [ + { + "name": "m", + "type": "const git_pathspec_match_list *", + "comment": "The git_pathspec_match_list object" + }, + { + "name": "pos", + "type": "size_t", + "comment": "The index into the list" + } + ], + "argline": "const git_pathspec_match_list *m, size_t pos", + "sig": "const git_pathspec_match_list *::size_t", + "return": { + "type": "const git_diff_delta *", + "comment": " The filename of the match" + }, + "description": "

Get a matching diff delta by position.

\n", + "comments": "

This routine can only be used if the match list was generated by git_pathspec_match_diff. Otherwise it will always return NULL.

\n", + "group": "pathspec" + }, + "git_pathspec_match_list_failed_entrycount": { + "type": "function", + "file": "pathspec.h", + "line": 247, + "lineto": 248, + "args": [ + { + "name": "m", + "type": "const git_pathspec_match_list *", + "comment": "The git_pathspec_match_list object" + } + ], + "argline": "const git_pathspec_match_list *m", + "sig": "const git_pathspec_match_list *", + "return": { + "type": "size_t", + "comment": " Number of items in original pathspec that had no matches" + }, + "description": "

Get the number of pathspec items that did not match.

\n", + "comments": "

This will be zero unless you passed GIT_PATHSPEC_FIND_FAILURES when generating the git_pathspec_match_list.

\n", + "group": "pathspec" + }, + "git_pathspec_match_list_failed_entry": { + "type": "function", + "file": "pathspec.h", + "line": 259, + "lineto": 260, + "args": [ + { + "name": "m", + "type": "const git_pathspec_match_list *", + "comment": "The git_pathspec_match_list object" + }, + { + "name": "pos", + "type": "size_t", + "comment": "The index into the failed items" + } + ], + "argline": "const git_pathspec_match_list *m, size_t pos", + "sig": "const git_pathspec_match_list *::size_t", + "return": { + "type": "const char *", + "comment": " The pathspec pattern that didn't match anything" + }, + "description": "

Get an original pathspec string that had no matches.

\n", + "comments": "

This will return NULL for positions out of range.

\n", + "group": "pathspec" + }, + "git_proxy_init_options": { + "type": "function", + "file": "proxy.h", + "line": 88, + "lineto": 88, + "args": [ + { + "name": "opts", + "type": "git_proxy_options *", + "comment": "the options struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "the version of the struct, use `GIT_PROXY_OPTIONS_VERSION`" + } + ], + "argline": "git_proxy_options *opts, unsigned int version", + "sig": "git_proxy_options *::unsigned int", + "return": { + "type": "int", + "comment": null + }, + "description": "

Initialize a proxy options structure

\n", + "comments": "", + "group": "proxy" + }, + "git_rebase_init_options": { + "type": "function", + "file": "rebase.h", + "line": 156, + "lineto": 158, + "args": [ + { + "name": "opts", + "type": "git_rebase_options *", + "comment": "the `git_rebase_options` instance to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "the version of the struct; you should pass\n `GIT_REBASE_OPTIONS_VERSION` here." + } + ], + "argline": "git_rebase_options *opts, unsigned int version", + "sig": "git_rebase_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_rebase_options with default values. Equivalent to\n creating an instance with GIT_REBASE_OPTIONS_INIT.

\n", + "comments": "", + "group": "rebase" + }, + "git_rebase_init": { + "type": "function", + "file": "rebase.h", + "line": 177, + "lineto": 183, + "args": [ + { + "name": "out", + "type": "git_rebase **", + "comment": "Pointer to store the rebase object" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository to perform the rebase" + }, + { + "name": "branch", + "type": "const git_annotated_commit *", + "comment": "The terminal commit to rebase, or NULL to rebase the\n current branch" + }, + { + "name": "upstream", + "type": "const git_annotated_commit *", + "comment": "The commit to begin rebasing from, or NULL to rebase all\n reachable commits" + }, + { + "name": "onto", + "type": "const git_annotated_commit *", + "comment": "The branch to rebase onto, or NULL to rebase onto the given\n upstream" + }, + { + "name": "opts", + "type": "const git_rebase_options *", + "comment": "Options to specify how rebase is performed, or NULL" + } + ], + "argline": "git_rebase **out, git_repository *repo, const git_annotated_commit *branch, const git_annotated_commit *upstream, const git_annotated_commit *onto, const git_rebase_options *opts", + "sig": "git_rebase **::git_repository *::const git_annotated_commit *::const git_annotated_commit *::const git_annotated_commit *::const git_rebase_options *", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a rebase operation to rebase the changes in branch\n relative to upstream onto another branch. To begin the rebase\n process, call git_rebase_next. When you have finished with this\n object, call git_rebase_free.

\n", + "comments": "", + "group": "rebase" + }, + "git_rebase_open": { + "type": "function", + "file": "rebase.h", + "line": 194, + "lineto": 197, + "args": [ + { + "name": "out", + "type": "git_rebase **", + "comment": "Pointer to store the rebase object" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository that has a rebase in-progress" + }, + { + "name": "opts", + "type": "const git_rebase_options *", + "comment": "Options to specify how rebase is performed" + } + ], + "argline": "git_rebase **out, git_repository *repo, const git_rebase_options *opts", + "sig": "git_rebase **::git_repository *::const git_rebase_options *", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Opens an existing rebase that was previously started by either an\n invocation of git_rebase_init or by another client.

\n", + "comments": "", + "group": "rebase" + }, + "git_rebase_operation_entrycount": { + "type": "function", + "file": "rebase.h", + "line": 205, + "lineto": 205, + "args": [ + { + "name": "rebase", + "type": "git_rebase *", + "comment": "The in-progress rebase" + } + ], + "argline": "git_rebase *rebase", + "sig": "git_rebase *", + "return": { + "type": "size_t", + "comment": " The number of rebase operations in total" + }, + "description": "

Gets the count of rebase operations that are to be applied.

\n", + "comments": "", + "group": "rebase" + }, + "git_rebase_operation_current": { + "type": "function", + "file": "rebase.h", + "line": 216, + "lineto": 216, + "args": [ + { + "name": "rebase", + "type": "git_rebase *", + "comment": "The in-progress rebase" + } + ], + "argline": "git_rebase *rebase", + "sig": "git_rebase *", + "return": { + "type": "size_t", + "comment": " The index of the rebase operation currently being applied." + }, + "description": "

Gets the index of the rebase operation that is currently being applied.\n If the first operation has not yet been applied (because you have\n called init but not yet next) then this returns\n GIT_REBASE_NO_OPERATION.

\n", + "comments": "", + "group": "rebase" + }, + "git_rebase_operation_byindex": { + "type": "function", + "file": "rebase.h", + "line": 225, + "lineto": 227, + "args": [ + { + "name": "rebase", + "type": "git_rebase *", + "comment": "The in-progress rebase" + }, + { + "name": "idx", + "type": "size_t", + "comment": "The index of the rebase operation to retrieve" + } + ], + "argline": "git_rebase *rebase, size_t idx", + "sig": "git_rebase *::size_t", + "return": { + "type": "git_rebase_operation *", + "comment": " The rebase operation or NULL if `idx` was out of bounds" + }, + "description": "

Gets the rebase operation specified by the given index.

\n", + "comments": "", + "group": "rebase" + }, + "git_rebase_next": { + "type": "function", + "file": "rebase.h", + "line": 240, + "lineto": 242, + "args": [ + { + "name": "operation", + "type": "git_rebase_operation **", + "comment": "Pointer to store the rebase operation that is to be performed next" + }, + { + "name": "rebase", + "type": "git_rebase *", + "comment": "The rebase in progress" + } + ], + "argline": "git_rebase_operation **operation, git_rebase *rebase", + "sig": "git_rebase_operation **::git_rebase *", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Performs the next rebase operation and returns the information about it.\n If the operation is one that applies a patch (which is any operation except\n GIT_REBASE_OPERATION_EXEC) then the patch will be applied and the index and\n working directory will be updated with the changes. If there are conflicts,\n you will need to address those before committing the changes.

\n", + "comments": "", + "group": "rebase" + }, + "git_rebase_inmemory_index": { + "type": "function", + "file": "rebase.h", + "line": 255, + "lineto": 257, + "args": [ + { + "name": "index", + "type": "git_index **", + "comment": null + }, + { + "name": "rebase", + "type": "git_rebase *", + "comment": null + } + ], + "argline": "git_index **index, git_rebase *rebase", + "sig": "git_index **::git_rebase *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Gets the index produced by the last operation, which is the result\n of git_rebase_next and which will be committed by the next\n invocation of git_rebase_commit. This is useful for resolving\n conflicts in an in-memory rebase before committing them. You must\n call git_index_free when you are finished with this.

\n", + "comments": "

This is only applicable for in-memory rebases; for rebases within a working directory, the changes were applied to the repository's index.

\n", + "group": "rebase" + }, + "git_rebase_commit": { + "type": "function", + "file": "rebase.h", + "line": 281, + "lineto": 287, + "args": [ + { + "name": "id", + "type": "git_oid *", + "comment": "Pointer in which to store the OID of the newly created commit" + }, + { + "name": "rebase", + "type": "git_rebase *", + "comment": "The rebase that is in-progress" + }, + { + "name": "author", + "type": "const git_signature *", + "comment": "The author of the updated commit, or NULL to keep the\n author from the original commit" + }, + { + "name": "committer", + "type": "const git_signature *", + "comment": "The committer of the rebase" + }, + { + "name": "message_encoding", + "type": "const char *", + "comment": "The encoding for the message in the commit,\n represented with a standard encoding name. If message is NULL,\n this should also be NULL, and the encoding from the original\n commit will be maintained. If message is specified, this may be\n NULL to indicate that \"UTF-8\" is to be used." + }, + { + "name": "message", + "type": "const char *", + "comment": "The message for this commit, or NULL to use the message\n from the original commit." + } + ], + "argline": "git_oid *id, git_rebase *rebase, const git_signature *author, const git_signature *committer, const char *message_encoding, const char *message", + "sig": "git_oid *::git_rebase *::const git_signature *::const git_signature *::const char *::const char *", + "return": { + "type": "int", + "comment": " Zero on success, GIT_EUNMERGED if there are unmerged changes in\n the index, GIT_EAPPLIED if the current commit has already\n been applied to the upstream and there is nothing to commit,\n -1 on failure." + }, + "description": "

Commits the current patch. You must have resolved any conflicts that\n were introduced during the patch application from the git_rebase_next\n invocation.

\n", + "comments": "", + "group": "rebase" + }, + "git_rebase_abort": { + "type": "function", + "file": "rebase.h", + "line": 297, + "lineto": 297, + "args": [ + { + "name": "rebase", + "type": "git_rebase *", + "comment": "The rebase that is in-progress" + } + ], + "argline": "git_rebase *rebase", + "sig": "git_rebase *", + "return": { + "type": "int", + "comment": " Zero on success; GIT_ENOTFOUND if a rebase is not in progress,\n -1 on other errors." + }, + "description": "

Aborts a rebase that is currently in progress, resetting the repository\n and working directory to their state before rebase began.

\n", + "comments": "", + "group": "rebase" + }, + "git_rebase_finish": { + "type": "function", + "file": "rebase.h", + "line": 307, + "lineto": 309, + "args": [ + { + "name": "rebase", + "type": "git_rebase *", + "comment": "The rebase that is in-progress" + }, + { + "name": "signature", + "type": "const git_signature *", + "comment": "The identity that is finishing the rebase (optional)" + } + ], + "argline": "git_rebase *rebase, const git_signature *signature", + "sig": "git_rebase *::const git_signature *", + "return": { + "type": "int", + "comment": " Zero on success; -1 on error" + }, + "description": "

Finishes a rebase that is currently in progress once all patches have\n been applied.

\n", + "comments": "", + "group": "rebase" + }, + "git_rebase_free": { + "type": "function", + "file": "rebase.h", + "line": 316, + "lineto": 316, + "args": [ + { + "name": "rebase", + "type": "git_rebase *", + "comment": "The rebase object" + } + ], + "argline": "git_rebase *rebase", + "sig": "git_rebase *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Frees the git_rebase object.

\n", + "comments": "", + "group": "rebase" + }, + "git_refdb_new": { + "type": "function", + "file": "refdb.h", + "line": 35, + "lineto": 35, + "args": [ + { + "name": "out", + "type": "git_refdb **", + "comment": "location to store the database pointer, if opened.\n\t\t\tSet to NULL if the open failed." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository" + } + ], + "argline": "git_refdb **out, git_repository *repo", + "sig": "git_refdb **::git_repository *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a new reference database with no backends.

\n", + "comments": "

Before the Ref DB can be used for read/writing, a custom database backend must be manually set using git_refdb_set_backend()

\n", + "group": "refdb" + }, + "git_refdb_open": { + "type": "function", + "file": "refdb.h", + "line": 49, + "lineto": 49, + "args": [ + { + "name": "out", + "type": "git_refdb **", + "comment": "location to store the database pointer, if opened.\n\t\t\tSet to NULL if the open failed." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository" + } + ], + "argline": "git_refdb **out, git_repository *repo", + "sig": "git_refdb **::git_repository *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a new reference database and automatically add\n the default backends:

\n", + "comments": "
    \n
  • git_refdb_dir: read and write loose and packed refs from disk, assuming the repository dir as the folder
  • \n
\n", + "group": "refdb" + }, + "git_refdb_compress": { + "type": "function", + "file": "refdb.h", + "line": 56, + "lineto": 56, + "args": [ + { + "name": "refdb", + "type": "git_refdb *", + "comment": null + } + ], + "argline": "git_refdb *refdb", + "sig": "git_refdb *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Suggests that the given refdb compress or optimize its references.\n This mechanism is implementation specific. For on-disk reference\n databases, for example, this may pack all loose references.

\n", + "comments": "", + "group": "refdb" + }, + "git_refdb_free": { + "type": "function", + "file": "refdb.h", + "line": 63, + "lineto": 63, + "args": [ + { + "name": "refdb", + "type": "git_refdb *", + "comment": "reference database pointer or NULL" + } + ], + "argline": "git_refdb *refdb", + "sig": "git_refdb *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Close an open reference database.

\n", + "comments": "", + "group": "refdb" + }, + "git_reflog_read": { + "type": "function", + "file": "reflog.h", + "line": 38, + "lineto": 38, + "args": [ + { + "name": "out", + "type": "git_reflog **", + "comment": "pointer to reflog" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repostiory" + }, + { + "name": "name", + "type": "const char *", + "comment": "reference to look up" + } + ], + "argline": "git_reflog **out, git_repository *repo, const char *name", + "sig": "git_reflog **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Read the reflog for the given reference

\n", + "comments": "

If there is no reflog file for the given reference yet, an empty reflog object will be returned.

\n\n

The reflog must be freed manually by using git_reflog_free().

\n", + "group": "reflog" + }, + "git_reflog_write": { + "type": "function", + "file": "reflog.h", + "line": 47, + "lineto": 47, + "args": [ + { + "name": "reflog", + "type": "git_reflog *", + "comment": "an existing reflog object" + } + ], + "argline": "git_reflog *reflog", + "sig": "git_reflog *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Write an existing in-memory reflog object back to disk\n using an atomic file lock.

\n", + "comments": "", + "group": "reflog" + }, + "git_reflog_append": { + "type": "function", + "file": "reflog.h", + "line": 60, + "lineto": 60, + "args": [ + { + "name": "reflog", + "type": "git_reflog *", + "comment": "an existing reflog object" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "the OID the reference is now pointing to" + }, + { + "name": "committer", + "type": "const git_signature *", + "comment": "the signature of the committer" + }, + { + "name": "msg", + "type": "const char *", + "comment": "the reflog message" + } + ], + "argline": "git_reflog *reflog, const git_oid *id, const git_signature *committer, const char *msg", + "sig": "git_reflog *::const git_oid *::const git_signature *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Add a new entry to the in-memory reflog.

\n", + "comments": "

msg is optional and can be NULL.

\n", + "group": "reflog" + }, + "git_reflog_rename": { + "type": "function", + "file": "reflog.h", + "line": 75, + "lineto": 75, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository" + }, + { + "name": "old_name", + "type": "const char *", + "comment": "the old name of the reference" + }, + { + "name": "name", + "type": "const char *", + "comment": "the new name of the reference" + } + ], + "argline": "git_repository *repo, const char *old_name, const char *name", + "sig": "git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EINVALIDSPEC or an error code" + }, + "description": "

Rename a reflog

\n", + "comments": "

The reflog to be renamed is expected to already exist

\n\n

The new name will be checked for validity. See git_reference_create_symbolic() for rules about valid names.

\n", + "group": "reflog" + }, + "git_reflog_delete": { + "type": "function", + "file": "reflog.h", + "line": 84, + "lineto": 84, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository" + }, + { + "name": "name", + "type": "const char *", + "comment": "the reflog to delete" + } + ], + "argline": "git_repository *repo, const char *name", + "sig": "git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Delete the reflog for the given reference

\n", + "comments": "", + "group": "reflog" + }, + "git_reflog_entrycount": { + "type": "function", + "file": "reflog.h", + "line": 92, + "lineto": 92, + "args": [ + { + "name": "reflog", + "type": "git_reflog *", + "comment": "the previously loaded reflog" + } + ], + "argline": "git_reflog *reflog", + "sig": "git_reflog *", + "return": { + "type": "size_t", + "comment": " the number of log entries" + }, + "description": "

Get the number of log entries in a reflog

\n", + "comments": "", + "group": "reflog" + }, + "git_reflog_entry_byindex": { + "type": "function", + "file": "reflog.h", + "line": 105, + "lineto": 105, + "args": [ + { + "name": "reflog", + "type": "const git_reflog *", + "comment": "a previously loaded reflog" + }, + { + "name": "idx", + "type": "size_t", + "comment": "the position of the entry to lookup. Should be greater than or\n equal to 0 (zero) and less than `git_reflog_entrycount()`." + } + ], + "argline": "const git_reflog *reflog, size_t idx", + "sig": "const git_reflog *::size_t", + "return": { + "type": "const git_reflog_entry *", + "comment": " the entry; NULL if not found" + }, + "description": "

Lookup an entry by its index

\n", + "comments": "

Requesting the reflog entry with an index of 0 (zero) will return the most recently created entry.

\n", + "group": "reflog" + }, + "git_reflog_drop": { + "type": "function", + "file": "reflog.h", + "line": 124, + "lineto": 127, + "args": [ + { + "name": "reflog", + "type": "git_reflog *", + "comment": "a previously loaded reflog." + }, + { + "name": "idx", + "type": "size_t", + "comment": "the position of the entry to remove. Should be greater than or\n equal to 0 (zero) and less than `git_reflog_entrycount()`." + }, + { + "name": "rewrite_previous_entry", + "type": "int", + "comment": "1 to rewrite the history; 0 otherwise." + } + ], + "argline": "git_reflog *reflog, size_t idx, int rewrite_previous_entry", + "sig": "git_reflog *::size_t::int", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if the entry doesn't exist\n or an error code." + }, + "description": "

Remove an entry from the reflog by its index

\n", + "comments": "

To ensure there's no gap in the log history, set rewrite_previous_entry param value to 1. When deleting entry n, member old_oid of entry n-1 (if any) will be updated with the value of member new_oid of entry n+1.

\n", + "group": "reflog" + }, + "git_reflog_entry_id_old": { + "type": "function", + "file": "reflog.h", + "line": 135, + "lineto": 135, + "args": [ + { + "name": "entry", + "type": "const git_reflog_entry *", + "comment": "a reflog entry" + } + ], + "argline": "const git_reflog_entry *entry", + "sig": "const git_reflog_entry *", + "return": { + "type": "const git_oid *", + "comment": " the old oid" + }, + "description": "

Get the old oid

\n", + "comments": "", + "group": "reflog" + }, + "git_reflog_entry_id_new": { + "type": "function", + "file": "reflog.h", + "line": 143, + "lineto": 143, + "args": [ + { + "name": "entry", + "type": "const git_reflog_entry *", + "comment": "a reflog entry" + } + ], + "argline": "const git_reflog_entry *entry", + "sig": "const git_reflog_entry *", + "return": { + "type": "const git_oid *", + "comment": " the new oid at this time" + }, + "description": "

Get the new oid

\n", + "comments": "", + "group": "reflog" + }, + "git_reflog_entry_committer": { + "type": "function", + "file": "reflog.h", + "line": 151, + "lineto": 151, + "args": [ + { + "name": "entry", + "type": "const git_reflog_entry *", + "comment": "a reflog entry" + } + ], + "argline": "const git_reflog_entry *entry", + "sig": "const git_reflog_entry *", + "return": { + "type": "const git_signature *", + "comment": " the committer" + }, + "description": "

Get the committer of this entry

\n", + "comments": "", + "group": "reflog" + }, + "git_reflog_entry_message": { + "type": "function", + "file": "reflog.h", + "line": 159, + "lineto": 159, + "args": [ + { + "name": "entry", + "type": "const git_reflog_entry *", + "comment": "a reflog entry" + } + ], + "argline": "const git_reflog_entry *entry", + "sig": "const git_reflog_entry *", + "return": { + "type": "const char *", + "comment": " the log msg" + }, + "description": "

Get the log message

\n", + "comments": "", + "group": "reflog" + }, + "git_reflog_free": { + "type": "function", + "file": "reflog.h", + "line": 166, + "lineto": 166, + "args": [ + { + "name": "reflog", + "type": "git_reflog *", + "comment": "reflog to free" + } + ], + "argline": "git_reflog *reflog", + "sig": "git_reflog *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free the reflog

\n", + "comments": "", + "group": "reflog" + }, + "git_reference_lookup": { + "type": "function", + "file": "refs.h", + "line": 37, + "lineto": 37, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "pointer to the looked-up reference" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to look up the reference" + }, + { + "name": "name", + "type": "const char *", + "comment": "the long name for the reference (e.g. HEAD, refs/heads/master, refs/tags/v0.1.0, ...)" + } + ], + "argline": "git_reference **out, git_repository *repo, const char *name", + "sig": "git_reference **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND, GIT_EINVALIDSPEC or an error code." + }, + "description": "

Lookup a reference by name in a repository.

\n", + "comments": "

The returned reference must be freed by the user.

\n\n

The name will be checked for validity. See git_reference_symbolic_create() for rules about valid names.

\n", + "group": "reference", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_reference_lookup-53" + ] + } + }, + "git_reference_name_to_id": { + "type": "function", + "file": "refs.h", + "line": 54, + "lineto": 55, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "Pointer to oid to be filled in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository in which to look up the reference" + }, + { + "name": "name", + "type": "const char *", + "comment": "The long name for the reference (e.g. HEAD, refs/heads/master, refs/tags/v0.1.0, ...)" + } + ], + "argline": "git_oid *out, git_repository *repo, const char *name", + "sig": "git_oid *::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND, GIT_EINVALIDSPEC or an error code." + }, + "description": "

Lookup a reference by name and resolve immediately to OID.

\n", + "comments": "

This function provides a quick way to resolve a reference name straight through to the object id that it refers to. This avoids having to allocate or free any git_reference objects for simple situations.

\n\n

The name will be checked for validity. See git_reference_symbolic_create() for rules about valid names.

\n", + "group": "reference" + }, + "git_reference_dwim": { + "type": "function", + "file": "refs.h", + "line": 68, + "lineto": 68, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "pointer in which to store the reference" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to look" + }, + { + "name": "shorthand", + "type": "const char *", + "comment": "the short name for the reference" + } + ], + "argline": "git_reference **out, git_repository *repo, const char *shorthand", + "sig": "git_reference **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a reference by DWIMing its short name

\n", + "comments": "

Apply the git precedence rules to the given shorthand to determine which reference the user is referring to.

\n", + "group": "reference" + }, + "git_reference_symbolic_create_matching": { + "type": "function", + "file": "refs.h", + "line": 109, + "lineto": 109, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "Pointer to the newly created reference" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where that reference will live" + }, + { + "name": "name", + "type": "const char *", + "comment": "The name of the reference" + }, + { + "name": "target", + "type": "const char *", + "comment": "The target of the reference" + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite existing references" + }, + { + "name": "current_value", + "type": "const char *", + "comment": "The expected value of the reference when updating" + }, + { + "name": "log_message", + "type": "const char *", + "comment": "The one line long message to be appended to the reflog" + } + ], + "argline": "git_reference **out, git_repository *repo, const char *name, const char *target, int force, const char *current_value, const char *log_message", + "sig": "git_reference **::git_repository *::const char *::const char *::int::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EEXISTS, GIT_EINVALIDSPEC, GIT_EMODIFIED or an error code" + }, + "description": "

Conditionally create a new symbolic reference.

\n", + "comments": "

A symbolic reference is a reference name that refers to another reference name. If the other name moves, the symbolic name will move, too. As a simple example, the "HEAD" reference might refer to "refs/heads/master" while on the "master" branch of a repository.

\n\n

The symbolic reference will be created in the repository and written to the disk. The generated reference object must be freed by the user.

\n\n

Valid reference names must follow one of two patterns:

\n\n
    \n
  1. Top-level names must contain only capital letters and underscores, and must begin and end with a letter. (e.g. "HEAD", "ORIG_HEAD").
  2. Names prefixed with "refs/" can be almost anything. You must avoid the characters '~', '^', ':', '\\', '?', '[', and '*', and the sequences ".." and "@{" which have special meaning to revparse.\n
\n\n

This function will return an error if a reference already exists with the given name unless force is true, in which case it will be overwritten.

\n\n

The message for the reflog will be ignored if the reference does not belong in the standard set (HEAD, branches and remote-tracking branches) and it does not have a reflog.

\n\n

It will return GIT_EMODIFIED if the reference's value at the time of updating does not match the one passed through current_value (i.e. if the ref has changed since the user read it).

\n", + "group": "reference" + }, + "git_reference_symbolic_create": { + "type": "function", + "file": "refs.h", + "line": 145, + "lineto": 145, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "Pointer to the newly created reference" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where that reference will live" + }, + { + "name": "name", + "type": "const char *", + "comment": "The name of the reference" + }, + { + "name": "target", + "type": "const char *", + "comment": "The target of the reference" + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite existing references" + }, + { + "name": "log_message", + "type": "const char *", + "comment": "The one line long message to be appended to the reflog" + } + ], + "argline": "git_reference **out, git_repository *repo, const char *name, const char *target, int force, const char *log_message", + "sig": "git_reference **::git_repository *::const char *::const char *::int::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EEXISTS, GIT_EINVALIDSPEC or an error code" + }, + "description": "

Create a new symbolic reference.

\n", + "comments": "

A symbolic reference is a reference name that refers to another reference name. If the other name moves, the symbolic name will move, too. As a simple example, the "HEAD" reference might refer to "refs/heads/master" while on the "master" branch of a repository.

\n\n

The symbolic reference will be created in the repository and written to the disk. The generated reference object must be freed by the user.

\n\n

Valid reference names must follow one of two patterns:

\n\n
    \n
  1. Top-level names must contain only capital letters and underscores, and must begin and end with a letter. (e.g. "HEAD", "ORIG_HEAD").
  2. Names prefixed with "refs/" can be almost anything. You must avoid the characters '~', '^', ':', '\\', '?', '[', and '*', and the sequences ".." and "@{" which have special meaning to revparse.\n
\n\n

This function will return an error if a reference already exists with the given name unless force is true, in which case it will be overwritten.

\n\n

The message for the reflog will be ignored if the reference does not belong in the standard set (HEAD, branches and remote-tracking branches) and it does not have a reflog.

\n", + "group": "reference" + }, + "git_reference_create": { + "type": "function", + "file": "refs.h", + "line": 182, + "lineto": 182, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "Pointer to the newly created reference" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where that reference will live" + }, + { + "name": "name", + "type": "const char *", + "comment": "The name of the reference" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "The object id pointed to by the reference." + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite existing references" + }, + { + "name": "log_message", + "type": "const char *", + "comment": "The one line long message to be appended to the reflog" + } + ], + "argline": "git_reference **out, git_repository *repo, const char *name, const git_oid *id, int force, const char *log_message", + "sig": "git_reference **::git_repository *::const char *::const git_oid *::int::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EEXISTS, GIT_EINVALIDSPEC or an error code" + }, + "description": "

Create a new direct reference.

\n", + "comments": "

A direct reference (also called an object id reference) refers directly to a specific object id (a.k.a. OID or SHA) in the repository. The id permanently refers to the object (although the reference itself can be moved). For example, in libgit2 the direct ref "refs/tags/v0.17.0" refers to OID 5b9fac39d8a76b9139667c26a63e6b3f204b3977.

\n\n

The direct reference will be created in the repository and written to the disk. The generated reference object must be freed by the user.

\n\n

Valid reference names must follow one of two patterns:

\n\n
    \n
  1. Top-level names must contain only capital letters and underscores, and must begin and end with a letter. (e.g. "HEAD", "ORIG_HEAD").
  2. Names prefixed with "refs/" can be almost anything. You must avoid the characters '~', '^', ':', '\\', '?', '[', and '*', and the sequences ".." and "@{" which have special meaning to revparse.\n
\n\n

This function will return an error if a reference already exists with the given name unless force is true, in which case it will be overwritten.

\n\n

The message for the reflog will be ignored if the reference does not belong in the standard set (HEAD, branches and remote-tracking branches) and it does not have a reflog.

\n", + "group": "reference" + }, + "git_reference_create_matching": { + "type": "function", + "file": "refs.h", + "line": 225, + "lineto": 225, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "Pointer to the newly created reference" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where that reference will live" + }, + { + "name": "name", + "type": "const char *", + "comment": "The name of the reference" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "The object id pointed to by the reference." + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite existing references" + }, + { + "name": "current_id", + "type": "const git_oid *", + "comment": "The expected value of the reference at the time of update" + }, + { + "name": "log_message", + "type": "const char *", + "comment": "The one line long message to be appended to the reflog" + } + ], + "argline": "git_reference **out, git_repository *repo, const char *name, const git_oid *id, int force, const git_oid *current_id, const char *log_message", + "sig": "git_reference **::git_repository *::const char *::const git_oid *::int::const git_oid *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EMODIFIED if the value of the reference\n has changed, GIT_EEXISTS, GIT_EINVALIDSPEC or an error code" + }, + "description": "

Conditionally create new direct reference

\n", + "comments": "

A direct reference (also called an object id reference) refers directly to a specific object id (a.k.a. OID or SHA) in the repository. The id permanently refers to the object (although the reference itself can be moved). For example, in libgit2 the direct ref "refs/tags/v0.17.0" refers to OID 5b9fac39d8a76b9139667c26a63e6b3f204b3977.

\n\n

The direct reference will be created in the repository and written to the disk. The generated reference object must be freed by the user.

\n\n

Valid reference names must follow one of two patterns:

\n\n
    \n
  1. Top-level names must contain only capital letters and underscores, and must begin and end with a letter. (e.g. "HEAD", "ORIG_HEAD").
  2. Names prefixed with "refs/" can be almost anything. You must avoid the characters '~', '^', ':', '\\', '?', '[', and '*', and the sequences ".." and "@{" which have special meaning to revparse.\n
\n\n

This function will return an error if a reference already exists with the given name unless force is true, in which case it will be overwritten.

\n\n

The message for the reflog will be ignored if the reference does not belong in the standard set (HEAD, branches and remote-tracking branches) and it does not have a reflog.

\n\n

It will return GIT_EMODIFIED if the reference's value at the time of updating does not match the one passed through current_id (i.e. if the ref has changed since the user read it).

\n", + "group": "reference" + }, + "git_reference_target": { + "type": "function", + "file": "refs.h", + "line": 240, + "lineto": 240, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "The reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "const git_oid *", + "comment": " a pointer to the oid if available, NULL otherwise" + }, + "description": "

Get the OID pointed to by a direct reference.

\n", + "comments": "

Only available if the reference is direct (i.e. an object id reference, not a symbolic one).

\n\n

To find the OID of a symbolic ref, call git_reference_resolve() and then this function (or maybe use git_reference_name_to_id() to directly resolve a reference name all the way through to an OID).

\n", + "group": "reference", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_reference_target-54" + ] + } + }, + "git_reference_target_peel": { + "type": "function", + "file": "refs.h", + "line": 251, + "lineto": 251, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "The reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "const git_oid *", + "comment": " a pointer to the oid if available, NULL otherwise" + }, + "description": "

Return the peeled OID target of this reference.

\n", + "comments": "

This peeled OID only applies to direct references that point to a hard Tag object: it is the result of peeling such Tag.

\n", + "group": "reference" + }, + "git_reference_symbolic_target": { + "type": "function", + "file": "refs.h", + "line": 261, + "lineto": 261, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "The reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "const char *", + "comment": " a pointer to the name if available, NULL otherwise" + }, + "description": "

Get full name to the reference pointed to by a symbolic reference.

\n", + "comments": "

Only available if the reference is symbolic.

\n", + "group": "reference", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_reference_symbolic_target-55" + ] + } + }, + "git_reference_type": { + "type": "function", + "file": "refs.h", + "line": 271, + "lineto": 271, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "The reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "git_ref_t", + "comment": " the type" + }, + "description": "

Get the type of a reference.

\n", + "comments": "

Either direct (GIT_REF_OID) or symbolic (GIT_REF_SYMBOLIC)

\n", + "group": "reference", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_reference_type-56" + ] + } + }, + "git_reference_name": { + "type": "function", + "file": "refs.h", + "line": 281, + "lineto": 281, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "The reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "const char *", + "comment": " the full name for the ref" + }, + "description": "

Get the full name of a reference.

\n", + "comments": "

See git_reference_symbolic_create() for rules about valid names.

\n", + "group": "reference" + }, + "git_reference_resolve": { + "type": "function", + "file": "refs.h", + "line": 299, + "lineto": 299, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "Pointer to the peeled reference" + }, + { + "name": "ref", + "type": "const git_reference *", + "comment": "The reference" + } + ], + "argline": "git_reference **out, const git_reference *ref", + "sig": "git_reference **::const git_reference *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Resolve a symbolic reference to a direct reference.

\n", + "comments": "

This method iteratively peels a symbolic reference until it resolves to a direct reference to an OID.

\n\n

The peeled reference is returned in the resolved_ref argument, and must be freed manually once it's no longer needed.

\n\n

If a direct reference is passed as an argument, a copy of that reference is returned. This copy must be manually freed too.

\n", + "group": "reference" + }, + "git_reference_owner": { + "type": "function", + "file": "refs.h", + "line": 307, + "lineto": 307, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "The reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "git_repository *", + "comment": " a pointer to the repo" + }, + "description": "

Get the repository where a reference resides.

\n", + "comments": "", + "group": "reference" + }, + "git_reference_symbolic_set_target": { + "type": "function", + "file": "refs.h", + "line": 329, + "lineto": 333, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "Pointer to the newly created reference" + }, + { + "name": "ref", + "type": "git_reference *", + "comment": "The reference" + }, + { + "name": "target", + "type": "const char *", + "comment": "The new target for the reference" + }, + { + "name": "log_message", + "type": "const char *", + "comment": "The one line long message to be appended to the reflog" + } + ], + "argline": "git_reference **out, git_reference *ref, const char *target, const char *log_message", + "sig": "git_reference **::git_reference *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EINVALIDSPEC or an error code" + }, + "description": "

Create a new reference with the same name as the given reference but a\n different symbolic target. The reference must be a symbolic reference,\n otherwise this will fail.

\n", + "comments": "

The new reference will be written to disk, overwriting the given reference.

\n\n

The target name will be checked for validity. See git_reference_symbolic_create() for rules about valid names.

\n\n

The message for the reflog will be ignored if the reference does not belong in the standard set (HEAD, branches and remote-tracking branches) and it does not have a reflog.

\n", + "group": "reference" + }, + "git_reference_set_target": { + "type": "function", + "file": "refs.h", + "line": 349, + "lineto": 353, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "Pointer to the newly created reference" + }, + { + "name": "ref", + "type": "git_reference *", + "comment": "The reference" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "The new target OID for the reference" + }, + { + "name": "log_message", + "type": "const char *", + "comment": "The one line long message to be appended to the reflog" + } + ], + "argline": "git_reference **out, git_reference *ref, const git_oid *id, const char *log_message", + "sig": "git_reference **::git_reference *::const git_oid *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EMODIFIED if the value of the reference\n has changed since it was read, or an error code" + }, + "description": "

Conditionally create a new reference with the same name as the given reference but a\n different OID target. The reference must be a direct reference, otherwise\n this will fail.

\n", + "comments": "

The new reference will be written to disk, overwriting the given reference.

\n", + "group": "reference" + }, + "git_reference_rename": { + "type": "function", + "file": "refs.h", + "line": 378, + "lineto": 383, + "args": [ + { + "name": "new_ref", + "type": "git_reference **", + "comment": null + }, + { + "name": "ref", + "type": "git_reference *", + "comment": "The reference to rename" + }, + { + "name": "new_name", + "type": "const char *", + "comment": "The new name for the reference" + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite an existing reference" + }, + { + "name": "log_message", + "type": "const char *", + "comment": "The one line long message to be appended to the reflog" + } + ], + "argline": "git_reference **new_ref, git_reference *ref, const char *new_name, int force, const char *log_message", + "sig": "git_reference **::git_reference *::const char *::int::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EINVALIDSPEC, GIT_EEXISTS or an error code" + }, + "description": "

Rename an existing reference.

\n", + "comments": "

This method works for both direct and symbolic references.

\n\n

The new name will be checked for validity. See git_reference_symbolic_create() for rules about valid names.

\n\n

If the force flag is not enabled, and there's already a reference with the given name, the renaming will fail.

\n\n

IMPORTANT: The user needs to write a proper reflog entry if the reflog is enabled for the repository. We only rename the reflog if it exists.

\n", + "group": "reference" + }, + "git_reference_delete": { + "type": "function", + "file": "refs.h", + "line": 398, + "lineto": 398, + "args": [ + { + "name": "ref", + "type": "git_reference *", + "comment": "The reference to remove" + } + ], + "argline": "git_reference *ref", + "sig": "git_reference *", + "return": { + "type": "int", + "comment": " 0, GIT_EMODIFIED or an error code" + }, + "description": "

Delete an existing reference.

\n", + "comments": "

This method works for both direct and symbolic references. The reference will be immediately removed on disk but the memory will not be freed. Callers must call git_reference_free.

\n\n

This function will return an error if the reference has changed from the time it was looked up.

\n", + "group": "reference" + }, + "git_reference_remove": { + "type": "function", + "file": "refs.h", + "line": 409, + "lineto": 409, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": null + }, + { + "name": "name", + "type": "const char *", + "comment": "The reference to remove" + } + ], + "argline": "git_repository *repo, const char *name", + "sig": "git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Delete an existing reference by name

\n", + "comments": "

This method removes the named reference from the repository without looking at its old value.

\n", + "group": "reference" + }, + "git_reference_list": { + "type": "function", + "file": "refs.h", + "line": 423, + "lineto": 423, + "args": [ + { + "name": "array", + "type": "git_strarray *", + "comment": "Pointer to a git_strarray structure where\n\t\tthe reference names will be stored" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to find the refs" + } + ], + "argline": "git_strarray *array, git_repository *repo", + "sig": "git_strarray *::git_repository *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Fill a list with all the references that can be found in a repository.

\n", + "comments": "

The string array will be filled with the names of all references; these values are owned by the user and should be free'd manually when no longer needed, using git_strarray_free().

\n", + "group": "reference", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_reference_list-57" + ] + } + }, + "git_reference_foreach": { + "type": "function", + "file": "refs.h", + "line": 441, + "lineto": 444, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to find the refs" + }, + { + "name": "callback", + "type": "git_reference_foreach_cb", + "comment": "Function which will be called for every listed ref" + }, + { + "name": "payload", + "type": "void *", + "comment": "Additional data to pass to the callback" + } + ], + "argline": "git_repository *repo, git_reference_foreach_cb callback, void *payload", + "sig": "git_repository *::git_reference_foreach_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Perform a callback on each reference in the repository.

\n", + "comments": "

The callback function will be called for each reference in the repository, receiving the reference object and the payload value passed to this method. Returning a non-zero value from the callback will terminate the iteration.

\n", + "group": "reference" + }, + "git_reference_foreach_name": { + "type": "function", + "file": "refs.h", + "line": 459, + "lineto": 462, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to find the refs" + }, + { + "name": "callback", + "type": "git_reference_foreach_name_cb", + "comment": "Function which will be called for every listed ref name" + }, + { + "name": "payload", + "type": "void *", + "comment": "Additional data to pass to the callback" + } + ], + "argline": "git_repository *repo, git_reference_foreach_name_cb callback, void *payload", + "sig": "git_repository *::git_reference_foreach_name_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Perform a callback on the fully-qualified name of each reference.

\n", + "comments": "

The callback function will be called for each reference in the repository, receiving the name of the reference and the payload value passed to this method. Returning a non-zero value from the callback will terminate the iteration.

\n", + "group": "reference" + }, + "git_reference_free": { + "type": "function", + "file": "refs.h", + "line": 469, + "lineto": 469, + "args": [ + { + "name": "ref", + "type": "git_reference *", + "comment": "git_reference" + } + ], + "argline": "git_reference *ref", + "sig": "git_reference *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free the given reference.

\n", + "comments": "", + "group": "reference", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_reference_free-3" + ] + } + }, + "git_reference_cmp": { + "type": "function", + "file": "refs.h", + "line": 478, + "lineto": 480, + "args": [ + { + "name": "ref1", + "type": "const git_reference *", + "comment": "The first git_reference" + }, + { + "name": "ref2", + "type": "const git_reference *", + "comment": "The second git_reference" + } + ], + "argline": "const git_reference *ref1, const git_reference *ref2", + "sig": "const git_reference *::const git_reference *", + "return": { + "type": "int", + "comment": " 0 if the same, else a stable but meaningless ordering." + }, + "description": "

Compare two references.

\n", + "comments": "", + "group": "reference" + }, + "git_reference_iterator_new": { + "type": "function", + "file": "refs.h", + "line": 489, + "lineto": 491, + "args": [ + { + "name": "out", + "type": "git_reference_iterator **", + "comment": "pointer in which to store the iterator" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository" + } + ], + "argline": "git_reference_iterator **out, git_repository *repo", + "sig": "git_reference_iterator **::git_repository *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an iterator for the repo's references

\n", + "comments": "", + "group": "reference" + }, + "git_reference_iterator_glob_new": { + "type": "function", + "file": "refs.h", + "line": 502, + "lineto": 505, + "args": [ + { + "name": "out", + "type": "git_reference_iterator **", + "comment": "pointer in which to store the iterator" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository" + }, + { + "name": "glob", + "type": "const char *", + "comment": "the glob to match against the reference names" + } + ], + "argline": "git_reference_iterator **out, git_repository *repo, const char *glob", + "sig": "git_reference_iterator **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an iterator for the repo's references that match the\n specified glob

\n", + "comments": "", + "group": "reference" + }, + "git_reference_next": { + "type": "function", + "file": "refs.h", + "line": 514, + "lineto": 514, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "pointer in which to store the reference" + }, + { + "name": "iter", + "type": "git_reference_iterator *", + "comment": "the iterator" + } + ], + "argline": "git_reference **out, git_reference_iterator *iter", + "sig": "git_reference **::git_reference_iterator *", + "return": { + "type": "int", + "comment": " 0, GIT_ITEROVER if there are no more; or an error code" + }, + "description": "

Get the next reference

\n", + "comments": "", + "group": "reference" + }, + "git_reference_next_name": { + "type": "function", + "file": "refs.h", + "line": 527, + "lineto": 527, + "args": [ + { + "name": "out", + "type": "const char **", + "comment": "pointer in which to store the string" + }, + { + "name": "iter", + "type": "git_reference_iterator *", + "comment": "the iterator" + } + ], + "argline": "const char **out, git_reference_iterator *iter", + "sig": "const char **::git_reference_iterator *", + "return": { + "type": "int", + "comment": " 0, GIT_ITEROVER if there are no more; or an error code" + }, + "description": "

Get the next reference's name

\n", + "comments": "

This function is provided for convenience in case only the names are interesting as it avoids the allocation of the git_reference object which git_reference_next() needs.

\n", + "group": "reference" + }, + "git_reference_iterator_free": { + "type": "function", + "file": "refs.h", + "line": 534, + "lineto": 534, + "args": [ + { + "name": "iter", + "type": "git_reference_iterator *", + "comment": "the iterator to free" + } + ], + "argline": "git_reference_iterator *iter", + "sig": "git_reference_iterator *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free the iterator and its associated resources

\n", + "comments": "", + "group": "reference" + }, + "git_reference_foreach_glob": { + "type": "function", + "file": "refs.h", + "line": 554, + "lineto": 558, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to find the refs" + }, + { + "name": "glob", + "type": "const char *", + "comment": "Pattern to match (fnmatch-style) against reference name." + }, + { + "name": "callback", + "type": "git_reference_foreach_name_cb", + "comment": "Function which will be called for every listed ref" + }, + { + "name": "payload", + "type": "void *", + "comment": "Additional data to pass to the callback" + } + ], + "argline": "git_repository *repo, const char *glob, git_reference_foreach_name_cb callback, void *payload", + "sig": "git_repository *::const char *::git_reference_foreach_name_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EUSER on non-zero callback, or error code" + }, + "description": "

Perform a callback on each reference in the repository whose name\n matches the given pattern.

\n", + "comments": "

This function acts like git_reference_foreach() with an additional pattern match being applied to the reference name before issuing the callback function. See that function for more information.

\n\n

The pattern is matched using fnmatch or "glob" style where a '*' matches any sequence of letters, a '?' matches any letter, and square brackets can be used to define character ranges (such as "[0-9]" for digits).

\n", + "group": "reference" + }, + "git_reference_has_log": { + "type": "function", + "file": "refs.h", + "line": 568, + "lineto": 568, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository" + }, + { + "name": "refname", + "type": "const char *", + "comment": "the reference's name" + } + ], + "argline": "git_repository *repo, const char *refname", + "sig": "git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 when no reflog can be found, 1 when it exists;\n otherwise an error code." + }, + "description": "

Check if a reflog exists for the specified reference.

\n", + "comments": "", + "group": "reference" + }, + "git_reference_ensure_log": { + "type": "function", + "file": "refs.h", + "line": 580, + "lineto": 580, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository" + }, + { + "name": "refname", + "type": "const char *", + "comment": "the reference's name" + } + ], + "argline": "git_repository *repo, const char *refname", + "sig": "git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code." + }, + "description": "

Ensure there is a reflog for a particular reference.

\n", + "comments": "

Make sure that successive updates to the reference will append to its log.

\n", + "group": "reference" + }, + "git_reference_is_branch": { + "type": "function", + "file": "refs.h", + "line": 590, + "lineto": 590, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "A git reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "int", + "comment": " 1 when the reference lives in the refs/heads\n namespace; 0 otherwise." + }, + "description": "

Check if a reference is a local branch.

\n", + "comments": "", + "group": "reference" + }, + "git_reference_is_remote": { + "type": "function", + "file": "refs.h", + "line": 600, + "lineto": 600, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "A git reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "int", + "comment": " 1 when the reference lives in the refs/remotes\n namespace; 0 otherwise." + }, + "description": "

Check if a reference is a remote tracking branch

\n", + "comments": "", + "group": "reference" + }, + "git_reference_is_tag": { + "type": "function", + "file": "refs.h", + "line": 610, + "lineto": 610, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "A git reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "int", + "comment": " 1 when the reference lives in the refs/tags\n namespace; 0 otherwise." + }, + "description": "

Check if a reference is a tag

\n", + "comments": "", + "group": "reference" + }, + "git_reference_is_note": { + "type": "function", + "file": "refs.h", + "line": 620, + "lineto": 620, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "A git reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "int", + "comment": " 1 when the reference lives in the refs/notes\n namespace; 0 otherwise." + }, + "description": "

Check if a reference is a note

\n", + "comments": "", + "group": "reference" + }, + "git_reference_normalize_name": { + "type": "function", + "file": "refs.h", + "line": 676, + "lineto": 680, + "args": [ + { + "name": "buffer_out", + "type": "char *", + "comment": "User allocated buffer to store normalized name" + }, + { + "name": "buffer_size", + "type": "size_t", + "comment": "Size of buffer_out" + }, + { + "name": "name", + "type": "const char *", + "comment": "Reference name to be checked." + }, + { + "name": "flags", + "type": "unsigned int", + "comment": "Flags to constrain name validation rules - see the\n GIT_REF_FORMAT constants above." + } + ], + "argline": "char *buffer_out, size_t buffer_size, const char *name, unsigned int flags", + "sig": "char *::size_t::const char *::unsigned int", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EBUFS if buffer is too small, GIT_EINVALIDSPEC\n or an error code." + }, + "description": "

Normalize reference name and check validity.

\n", + "comments": "

This will normalize the reference name by removing any leading slash '/' characters and collapsing runs of adjacent slashes between name components into a single slash.

\n\n

Once normalized, if the reference name is valid, it will be returned in the user allocated buffer.

\n\n

See git_reference_symbolic_create() for rules about valid names.

\n", + "group": "reference" + }, + "git_reference_peel": { + "type": "function", + "file": "refs.h", + "line": 697, + "lineto": 700, + "args": [ + { + "name": "out", + "type": "git_object **", + "comment": "Pointer to the peeled git_object" + }, + { + "name": "ref", + "type": "git_reference *", + "comment": "The reference to be processed" + }, + { + "name": "type", + "type": "git_otype", + "comment": "The type of the requested object (GIT_OBJ_COMMIT,\n GIT_OBJ_TAG, GIT_OBJ_TREE, GIT_OBJ_BLOB or GIT_OBJ_ANY)." + } + ], + "argline": "git_object **out, git_reference *ref, git_otype type", + "sig": "git_object **::git_reference *::git_otype", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EAMBIGUOUS, GIT_ENOTFOUND or an error code" + }, + "description": "

Recursively peel reference until object of the specified type is found.

\n", + "comments": "

The retrieved peeled object is owned by the repository and should be closed with the git_object_free method.

\n\n

If you pass GIT_OBJ_ANY as the target type, then the object will be peeled until a non-tag object is met.

\n", + "group": "reference" + }, + "git_reference_is_valid_name": { + "type": "function", + "file": "refs.h", + "line": 716, + "lineto": 716, + "args": [ + { + "name": "refname", + "type": "const char *", + "comment": "name to be checked." + } + ], + "argline": "const char *refname", + "sig": "const char *", + "return": { + "type": "int", + "comment": " 1 if the reference name is acceptable; 0 if it isn't" + }, + "description": "

Ensure the reference name is well-formed.

\n", + "comments": "

Valid reference names must follow one of two patterns:

\n\n
    \n
  1. Top-level names must contain only capital letters and underscores, and must begin and end with a letter. (e.g. "HEAD", "ORIG_HEAD").
  2. Names prefixed with "refs/" can be almost anything. You must avoid the characters '~', '^', ':', '\\', '?', '[', and '*', and the sequences ".." and "@{" which have special meaning to revparse. \n
\n", + "group": "reference" + }, + "git_reference_shorthand": { + "type": "function", + "file": "refs.h", + "line": 730, + "lineto": 730, + "args": [ + { + "name": "ref", + "type": "const git_reference *", + "comment": "a reference" + } + ], + "argline": "const git_reference *ref", + "sig": "const git_reference *", + "return": { + "type": "const char *", + "comment": " the human-readable version of the name" + }, + "description": "

Get the reference's short name

\n", + "comments": "

This will transform the reference name into a "human-readable" version. If no shortname is appropriate, it will return the full name.

\n\n

The memory is owned by the reference and must not be freed.

\n", + "group": "reference", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_reference_shorthand-4" + ] + } + }, + "git_refspec_src": { + "type": "function", + "file": "refspec.h", + "line": 30, + "lineto": 30, + "args": [ + { + "name": "refspec", + "type": "const git_refspec *", + "comment": "the refspec" + } + ], + "argline": "const git_refspec *refspec", + "sig": "const git_refspec *", + "return": { + "type": "const char *", + "comment": " the refspec's source specifier" + }, + "description": "

Get the source specifier

\n", + "comments": "", + "group": "refspec" + }, + "git_refspec_dst": { + "type": "function", + "file": "refspec.h", + "line": 38, + "lineto": 38, + "args": [ + { + "name": "refspec", + "type": "const git_refspec *", + "comment": "the refspec" + } + ], + "argline": "const git_refspec *refspec", + "sig": "const git_refspec *", + "return": { + "type": "const char *", + "comment": " the refspec's destination specifier" + }, + "description": "

Get the destination specifier

\n", + "comments": "", + "group": "refspec" + }, + "git_refspec_string": { + "type": "function", + "file": "refspec.h", + "line": 46, + "lineto": 46, + "args": [ + { + "name": "refspec", + "type": "const git_refspec *", + "comment": "the refspec" + } + ], + "argline": "const git_refspec *refspec", + "sig": "const git_refspec *", + "return": { + "type": "const char *", + "comment": null + }, + "description": "

Get the refspec's string

\n", + "comments": "", + "group": "refspec" + }, + "git_refspec_force": { + "type": "function", + "file": "refspec.h", + "line": 54, + "lineto": 54, + "args": [ + { + "name": "refspec", + "type": "const git_refspec *", + "comment": "the refspec" + } + ], + "argline": "const git_refspec *refspec", + "sig": "const git_refspec *", + "return": { + "type": "int", + "comment": " 1 if force update has been set, 0 otherwise" + }, + "description": "

Get the force update setting

\n", + "comments": "", + "group": "refspec" + }, + "git_refspec_direction": { + "type": "function", + "file": "refspec.h", + "line": 62, + "lineto": 62, + "args": [ + { + "name": "spec", + "type": "const git_refspec *", + "comment": "refspec" + } + ], + "argline": "const git_refspec *spec", + "sig": "const git_refspec *", + "return": { + "type": "git_direction", + "comment": " GIT_DIRECTION_FETCH or GIT_DIRECTION_PUSH" + }, + "description": "

Get the refspec's direction.

\n", + "comments": "", + "group": "refspec" + }, + "git_refspec_src_matches": { + "type": "function", + "file": "refspec.h", + "line": 71, + "lineto": 71, + "args": [ + { + "name": "refspec", + "type": "const git_refspec *", + "comment": "the refspec" + }, + { + "name": "refname", + "type": "const char *", + "comment": "the name of the reference to check" + } + ], + "argline": "const git_refspec *refspec, const char *refname", + "sig": "const git_refspec *::const char *", + "return": { + "type": "int", + "comment": " 1 if the refspec matches, 0 otherwise" + }, + "description": "

Check if a refspec's source descriptor matches a reference

\n", + "comments": "", + "group": "refspec" + }, + "git_refspec_dst_matches": { + "type": "function", + "file": "refspec.h", + "line": 80, + "lineto": 80, + "args": [ + { + "name": "refspec", + "type": "const git_refspec *", + "comment": "the refspec" + }, + { + "name": "refname", + "type": "const char *", + "comment": "the name of the reference to check" + } + ], + "argline": "const git_refspec *refspec, const char *refname", + "sig": "const git_refspec *::const char *", + "return": { + "type": "int", + "comment": " 1 if the refspec matches, 0 otherwise" + }, + "description": "

Check if a refspec's destination descriptor matches a reference

\n", + "comments": "", + "group": "refspec" + }, + "git_refspec_transform": { + "type": "function", + "file": "refspec.h", + "line": 90, + "lineto": 90, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "where to store the target name" + }, + { + "name": "spec", + "type": "const git_refspec *", + "comment": "the refspec" + }, + { + "name": "name", + "type": "const char *", + "comment": "the name of the reference to transform" + } + ], + "argline": "git_buf *out, const git_refspec *spec, const char *name", + "sig": "git_buf *::const git_refspec *::const char *", + "return": { + "type": "int", + "comment": " 0, GIT_EBUFS or another error" + }, + "description": "

Transform a reference to its target following the refspec's rules

\n", + "comments": "", + "group": "refspec" + }, + "git_refspec_rtransform": { + "type": "function", + "file": "refspec.h", + "line": 100, + "lineto": 100, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "where to store the source reference name" + }, + { + "name": "spec", + "type": "const git_refspec *", + "comment": "the refspec" + }, + { + "name": "name", + "type": "const char *", + "comment": "the name of the reference to transform" + } + ], + "argline": "git_buf *out, const git_refspec *spec, const char *name", + "sig": "git_buf *::const git_refspec *::const char *", + "return": { + "type": "int", + "comment": " 0, GIT_EBUFS or another error" + }, + "description": "

Transform a target reference to its source reference following the refspec's rules

\n", + "comments": "", + "group": "refspec" + }, + "git_remote_create": { + "type": "function", + "file": "remote.h", + "line": 40, + "lineto": 44, + "args": [ + { + "name": "out", + "type": "git_remote **", + "comment": "the resulting remote" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to create the remote" + }, + { + "name": "name", + "type": "const char *", + "comment": "the remote's name" + }, + { + "name": "url", + "type": "const char *", + "comment": "the remote's url" + } + ], + "argline": "git_remote **out, git_repository *repo, const char *name, const char *url", + "sig": "git_remote **::git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0, GIT_EINVALIDSPEC, GIT_EEXISTS or an error code" + }, + "description": "

Add a remote with the default fetch refspec to the repository's configuration.

\n", + "comments": "", + "group": "remote", + "examples": { + "remote.c": [ + "ex/HEAD/remote.html#git_remote_create-4" + ] + } + }, + "git_remote_create_with_fetchspec": { + "type": "function", + "file": "remote.h", + "line": 57, + "lineto": 62, + "args": [ + { + "name": "out", + "type": "git_remote **", + "comment": "the resulting remote" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to create the remote" + }, + { + "name": "name", + "type": "const char *", + "comment": "the remote's name" + }, + { + "name": "url", + "type": "const char *", + "comment": "the remote's url" + }, + { + "name": "fetch", + "type": "const char *", + "comment": "the remote fetch value" + } + ], + "argline": "git_remote **out, git_repository *repo, const char *name, const char *url, const char *fetch", + "sig": "git_remote **::git_repository *::const char *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0, GIT_EINVALIDSPEC, GIT_EEXISTS or an error code" + }, + "description": "

Add a remote with the provided fetch refspec (or default if NULL) to the repository's\n configuration.

\n", + "comments": "", + "group": "remote" + }, + "git_remote_create_anonymous": { + "type": "function", + "file": "remote.h", + "line": 75, + "lineto": 78, + "args": [ + { + "name": "out", + "type": "git_remote **", + "comment": "pointer to the new remote objects" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the associated repository" + }, + { + "name": "url", + "type": "const char *", + "comment": "the remote repository's URL" + } + ], + "argline": "git_remote **out, git_repository *repo, const char *url", + "sig": "git_remote **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an anonymous remote

\n", + "comments": "

Create a remote with the given url in-memory. You can use this when you have a URL instead of a remote's name.

\n", + "group": "remote", + "examples": { + "network/fetch.c": [ + "ex/HEAD/network/fetch.html#git_remote_create_anonymous-4" + ], + "network/ls-remote.c": [ + "ex/HEAD/network/ls-remote.html#git_remote_create_anonymous-2" + ] + } + }, + "git_remote_lookup": { + "type": "function", + "file": "remote.h", + "line": 91, + "lineto": 91, + "args": [ + { + "name": "out", + "type": "git_remote **", + "comment": "pointer to the new remote object" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the associated repository" + }, + { + "name": "name", + "type": "const char *", + "comment": "the remote's name" + } + ], + "argline": "git_remote **out, git_repository *repo, const char *name", + "sig": "git_remote **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0, GIT_ENOTFOUND, GIT_EINVALIDSPEC or an error code" + }, + "description": "

Get the information for a particular remote

\n", + "comments": "

The name will be checked for validity. See git_tag_create() for rules about valid names.

\n", + "group": "remote", + "examples": { + "network/fetch.c": [ + "ex/HEAD/network/fetch.html#git_remote_lookup-5" + ], + "network/ls-remote.c": [ + "ex/HEAD/network/ls-remote.html#git_remote_lookup-3" + ], + "remote.c": [ + "ex/HEAD/remote.html#git_remote_lookup-5" + ] + } + }, + "git_remote_dup": { + "type": "function", + "file": "remote.h", + "line": 103, + "lineto": 103, + "args": [ + { + "name": "dest", + "type": "git_remote **", + "comment": "pointer where to store the copy" + }, + { + "name": "source", + "type": "git_remote *", + "comment": "object to copy" + } + ], + "argline": "git_remote **dest, git_remote *source", + "sig": "git_remote **::git_remote *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a copy of an existing remote. All internal strings are also\n duplicated. Callbacks are not duplicated.

\n", + "comments": "

Call git_remote_free to free the data.

\n", + "group": "remote" + }, + "git_remote_owner": { + "type": "function", + "file": "remote.h", + "line": 111, + "lineto": 111, + "args": [ + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote" + } + ], + "argline": "const git_remote *remote", + "sig": "const git_remote *", + "return": { + "type": "git_repository *", + "comment": " a pointer to the repository" + }, + "description": "

Get the remote's repository

\n", + "comments": "", + "group": "remote" + }, + "git_remote_name": { + "type": "function", + "file": "remote.h", + "line": 119, + "lineto": 119, + "args": [ + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote" + } + ], + "argline": "const git_remote *remote", + "sig": "const git_remote *", + "return": { + "type": "const char *", + "comment": " a pointer to the name or NULL for in-memory remotes" + }, + "description": "

Get the remote's name

\n", + "comments": "", + "group": "remote" + }, + "git_remote_url": { + "type": "function", + "file": "remote.h", + "line": 130, + "lineto": 130, + "args": [ + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote" + } + ], + "argline": "const git_remote *remote", + "sig": "const git_remote *", + "return": { + "type": "const char *", + "comment": " a pointer to the url" + }, + "description": "

Get the remote's url

\n", + "comments": "

If url.*.insteadOf has been configured for this URL, it will return the modified URL.

\n", + "group": "remote", + "examples": { + "remote.c": [ + "ex/HEAD/remote.html#git_remote_url-6" + ] + } + }, + "git_remote_pushurl": { + "type": "function", + "file": "remote.h", + "line": 141, + "lineto": 141, + "args": [ + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote" + } + ], + "argline": "const git_remote *remote", + "sig": "const git_remote *", + "return": { + "type": "const char *", + "comment": " a pointer to the url or NULL if no special url for pushing is set" + }, + "description": "

Get the remote's url for pushing

\n", + "comments": "

If url.*.pushInsteadOf has been configured for this URL, it will return the modified URL.

\n", + "group": "remote", + "examples": { + "remote.c": [ + "ex/HEAD/remote.html#git_remote_pushurl-7" + ] + } + }, + "git_remote_set_url": { + "type": "function", + "file": "remote.h", + "line": 154, + "lineto": 154, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to perform the change" + }, + { + "name": "remote", + "type": "const char *", + "comment": "the remote's name" + }, + { + "name": "url", + "type": "const char *", + "comment": "the url to set" + } + ], + "argline": "git_repository *repo, const char *remote, const char *url", + "sig": "git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error value" + }, + "description": "

Set the remote's url in the configuration

\n", + "comments": "

Remote objects already in memory will not be affected. This assumes the common case of a single-url remote and will otherwise return an error.

\n", + "group": "remote", + "examples": { + "remote.c": [ + "ex/HEAD/remote.html#git_remote_set_url-8" + ] + } + }, + "git_remote_set_pushurl": { + "type": "function", + "file": "remote.h", + "line": 167, + "lineto": 167, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to perform the change" + }, + { + "name": "remote", + "type": "const char *", + "comment": "the remote's name" + }, + { + "name": "url", + "type": "const char *", + "comment": "the url to set" + } + ], + "argline": "git_repository *repo, const char *remote, const char *url", + "sig": "git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Set the remote's url for pushing in the configuration.

\n", + "comments": "

Remote objects already in memory will not be affected. This assumes the common case of a single-url remote and will otherwise return an error.

\n", + "group": "remote", + "examples": { + "remote.c": [ + "ex/HEAD/remote.html#git_remote_set_pushurl-9" + ] + } + }, + "git_remote_add_fetch": { + "type": "function", + "file": "remote.h", + "line": 180, + "lineto": 180, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to change the configuration" + }, + { + "name": "remote", + "type": "const char *", + "comment": "the name of the remote to change" + }, + { + "name": "refspec", + "type": "const char *", + "comment": "the new fetch refspec" + } + ], + "argline": "git_repository *repo, const char *remote, const char *refspec", + "sig": "git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0, GIT_EINVALIDSPEC if refspec is invalid or an error value" + }, + "description": "

Add a fetch refspec to the remote's configuration

\n", + "comments": "

Add the given refspec to the fetch list in the configuration. No loaded remote instances will be affected.

\n", + "group": "remote" + }, + "git_remote_get_fetch_refspecs": { + "type": "function", + "file": "remote.h", + "line": 191, + "lineto": 191, + "args": [ + { + "name": "array", + "type": "git_strarray *", + "comment": "pointer to the array in which to store the strings" + }, + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote to query" + } + ], + "argline": "git_strarray *array, const git_remote *remote", + "sig": "git_strarray *::const git_remote *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Get the remote's list of fetch refspecs

\n", + "comments": "

The memory is owned by the user and should be freed with git_strarray_free.

\n", + "group": "remote" + }, + "git_remote_add_push": { + "type": "function", + "file": "remote.h", + "line": 204, + "lineto": 204, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to change the configuration" + }, + { + "name": "remote", + "type": "const char *", + "comment": "the name of the remote to change" + }, + { + "name": "refspec", + "type": "const char *", + "comment": "the new push refspec" + } + ], + "argline": "git_repository *repo, const char *remote, const char *refspec", + "sig": "git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0, GIT_EINVALIDSPEC if refspec is invalid or an error value" + }, + "description": "

Add a push refspec to the remote's configuration

\n", + "comments": "

Add the given refspec to the push list in the configuration. No loaded remote instances will be affected.

\n", + "group": "remote" + }, + "git_remote_get_push_refspecs": { + "type": "function", + "file": "remote.h", + "line": 215, + "lineto": 215, + "args": [ + { + "name": "array", + "type": "git_strarray *", + "comment": "pointer to the array in which to store the strings" + }, + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote to query" + } + ], + "argline": "git_strarray *array, const git_remote *remote", + "sig": "git_strarray *::const git_remote *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Get the remote's list of push refspecs

\n", + "comments": "

The memory is owned by the user and should be freed with git_strarray_free.

\n", + "group": "remote" + }, + "git_remote_refspec_count": { + "type": "function", + "file": "remote.h", + "line": 223, + "lineto": 223, + "args": [ + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote" + } + ], + "argline": "const git_remote *remote", + "sig": "const git_remote *", + "return": { + "type": "size_t", + "comment": " the amount of refspecs configured in this remote" + }, + "description": "

Get the number of refspecs for a remote

\n", + "comments": "", + "group": "remote" + }, + "git_remote_get_refspec": { + "type": "function", + "file": "remote.h", + "line": 232, + "lineto": 232, + "args": [ + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote to query" + }, + { + "name": "n", + "type": "size_t", + "comment": "the refspec to get" + } + ], + "argline": "const git_remote *remote, size_t n", + "sig": "const git_remote *::size_t", + "return": { + "type": "const git_refspec *", + "comment": " the nth refspec" + }, + "description": "

Get a refspec from the remote

\n", + "comments": "", + "group": "remote" + }, + "git_remote_connect": { + "type": "function", + "file": "remote.h", + "line": 249, + "lineto": 249, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote to connect to" + }, + { + "name": "direction", + "type": "git_direction", + "comment": "GIT_DIRECTION_FETCH if you want to fetch or\n GIT_DIRECTION_PUSH if you want to push" + }, + { + "name": "callbacks", + "type": "const git_remote_callbacks *", + "comment": "the callbacks to use for this connection" + }, + { + "name": "proxy_opts", + "type": "const git_proxy_options *", + "comment": "proxy settings" + }, + { + "name": "custom_headers", + "type": "const git_strarray *", + "comment": "extra HTTP headers to use in this connection" + } + ], + "argline": "git_remote *remote, git_direction direction, const git_remote_callbacks *callbacks, const git_proxy_options *proxy_opts, const git_strarray *custom_headers", + "sig": "git_remote *::git_direction::const git_remote_callbacks *::const git_proxy_options *::const git_strarray *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Open a connection to a remote

\n", + "comments": "

The transport is selected based on the URL. The direction argument is due to a limitation of the git protocol (over TCP or SSH) which starts up a specific binary which can only do the one or the other.

\n", + "group": "remote", + "examples": { + "network/ls-remote.c": [ + "ex/HEAD/network/ls-remote.html#git_remote_connect-4" + ] + } + }, + "git_remote_ls": { + "type": "function", + "file": "remote.h", + "line": 271, + "lineto": 271, + "args": [ + { + "name": "out", + "type": "const git_remote_head ***", + "comment": "pointer to the array" + }, + { + "name": "size", + "type": "size_t *", + "comment": "the number of remote heads" + }, + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote" + } + ], + "argline": "const git_remote_head ***out, size_t *size, git_remote *remote", + "sig": "const git_remote_head ***::size_t *::git_remote *", + "return": { + "type": "int", + "comment": " 0 on success, or an error code" + }, + "description": "

Get the remote repository's reference advertisement list

\n", + "comments": "

Get the list of references with which the server responds to a new connection.

\n\n

The remote (or more exactly its transport) must have connected to the remote repository. This list is available as soon as the connection to the remote is initiated and it remains available after disconnecting.

\n\n

The memory belongs to the remote. The pointer will be valid as long as a new connection is not initiated, but it is recommended that you make a copy in order to make use of the data.

\n", + "group": "remote", + "examples": { + "network/ls-remote.c": [ + "ex/HEAD/network/ls-remote.html#git_remote_ls-5" + ] + } + }, + "git_remote_connected": { + "type": "function", + "file": "remote.h", + "line": 282, + "lineto": 282, + "args": [ + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote" + } + ], + "argline": "const git_remote *remote", + "sig": "const git_remote *", + "return": { + "type": "int", + "comment": " 1 if it's connected, 0 otherwise." + }, + "description": "

Check whether the remote is connected

\n", + "comments": "

Check whether the remote's underlying transport is connected to the remote host.

\n", + "group": "remote" + }, + "git_remote_stop": { + "type": "function", + "file": "remote.h", + "line": 292, + "lineto": 292, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote" + } + ], + "argline": "git_remote *remote", + "sig": "git_remote *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Cancel the operation

\n", + "comments": "

At certain points in its operation, the network code checks whether the operation has been cancelled and if so stops the operation.

\n", + "group": "remote" + }, + "git_remote_disconnect": { + "type": "function", + "file": "remote.h", + "line": 301, + "lineto": 301, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote to disconnect from" + } + ], + "argline": "git_remote *remote", + "sig": "git_remote *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Disconnect from the remote

\n", + "comments": "

Close the connection to the remote.

\n", + "group": "remote" + }, + "git_remote_free": { + "type": "function", + "file": "remote.h", + "line": 311, + "lineto": 311, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote to free" + } + ], + "argline": "git_remote *remote", + "sig": "git_remote *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free the memory associated with a remote

\n", + "comments": "

This also disconnects from the remote, if the connection has not been closed yet (using git_remote_disconnect).

\n", + "group": "remote", + "examples": { + "network/fetch.c": [ + "ex/HEAD/network/fetch.html#git_remote_free-6", + "ex/HEAD/network/fetch.html#git_remote_free-7" + ], + "network/ls-remote.c": [ + "ex/HEAD/network/ls-remote.html#git_remote_free-6" + ], + "remote.c": [ + "ex/HEAD/remote.html#git_remote_free-10" + ] + } + }, + "git_remote_list": { + "type": "function", + "file": "remote.h", + "line": 322, + "lineto": 322, + "args": [ + { + "name": "out", + "type": "git_strarray *", + "comment": "a string array which receives the names of the remotes" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to query" + } + ], + "argline": "git_strarray *out, git_repository *repo", + "sig": "git_strarray *::git_repository *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get a list of the configured remotes for a repo

\n", + "comments": "

The string array must be freed by the user.

\n", + "group": "remote", + "examples": { + "remote.c": [ + "ex/HEAD/remote.html#git_remote_list-11" + ] + } + }, + "git_remote_init_callbacks": { + "type": "function", + "file": "remote.h", + "line": 473, + "lineto": 475, + "args": [ + { + "name": "opts", + "type": "git_remote_callbacks *", + "comment": "the `git_remote_callbacks` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_REMOTE_CALLBACKS_VERSION`" + } + ], + "argline": "git_remote_callbacks *opts, unsigned int version", + "sig": "git_remote_callbacks *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_remote_callbacks with default values. Equivalent to\n creating an instance with GIT_REMOTE_CALLBACKS_INIT.

\n", + "comments": "", + "group": "remote" + }, + "git_fetch_init_options": { + "type": "function", + "file": "remote.h", + "line": 577, + "lineto": 579, + "args": [ + { + "name": "opts", + "type": "git_fetch_options *", + "comment": "the `git_push_options` instance to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "the version of the struct; you should pass\n `GIT_FETCH_OPTIONS_VERSION` here." + } + ], + "argline": "git_fetch_options *opts, unsigned int version", + "sig": "git_fetch_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_fetch_options with default values. Equivalent to\n creating an instance with GIT_FETCH_OPTIONS_INIT.

\n", + "comments": "", + "group": "fetch" + }, + "git_push_init_options": { + "type": "function", + "file": "remote.h", + "line": 626, + "lineto": 628, + "args": [ + { + "name": "opts", + "type": "git_push_options *", + "comment": "the `git_push_options` instance to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "the version of the struct; you should pass\n `GIT_PUSH_OPTIONS_VERSION` here." + } + ], + "argline": "git_push_options *opts, unsigned int version", + "sig": "git_push_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_push_options with default values. Equivalent to\n creating an instance with GIT_PUSH_OPTIONS_INIT.

\n", + "comments": "", + "group": "push" + }, + "git_remote_download": { + "type": "function", + "file": "remote.h", + "line": 646, + "lineto": 646, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote" + }, + { + "name": "refspecs", + "type": "const git_strarray *", + "comment": "the refspecs to use for this negotiation and\n download. Use NULL or an empty array to use the base refspecs" + }, + { + "name": "opts", + "type": "const git_fetch_options *", + "comment": "the options to use for this fetch" + } + ], + "argline": "git_remote *remote, const git_strarray *refspecs, const git_fetch_options *opts", + "sig": "git_remote *::const git_strarray *::const git_fetch_options *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Download and index the packfile

\n", + "comments": "

Connect to the remote if it hasn't been done yet, negotiate with the remote git which objects are missing, download and index the packfile.

\n\n

The .idx file will be created and both it and the packfile will be renamed to their final name.

\n", + "group": "remote" + }, + "git_remote_upload": { + "type": "function", + "file": "remote.h", + "line": 660, + "lineto": 660, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote" + }, + { + "name": "refspecs", + "type": "const git_strarray *", + "comment": "the refspecs to use for this negotiation and\n upload. Use NULL or an empty array to use the base refspecs" + }, + { + "name": "opts", + "type": "const git_push_options *", + "comment": "the options to use for this push" + } + ], + "argline": "git_remote *remote, const git_strarray *refspecs, const git_push_options *opts", + "sig": "git_remote *::const git_strarray *::const git_push_options *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a packfile and send it to the server

\n", + "comments": "

Connect to the remote if it hasn't been done yet, negotiate with the remote git which objects are missing, create a packfile with the missing objects and send it.

\n", + "group": "remote" + }, + "git_remote_update_tips": { + "type": "function", + "file": "remote.h", + "line": 676, + "lineto": 681, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote to update" + }, + { + "name": "callbacks", + "type": "const git_remote_callbacks *", + "comment": "pointer to the callback structure to use" + }, + { + "name": "update_fetchhead", + "type": "int", + "comment": "whether to write to FETCH_HEAD. Pass 1 to behave like git." + }, + { + "name": "download_tags", + "type": "git_remote_autotag_option_t", + "comment": "what the behaviour for downloading tags is for this fetch. This is\n ignored for push. This must be the same value passed to `git_remote_download()`." + }, + { + "name": "reflog_message", + "type": "const char *", + "comment": "The message to insert into the reflogs. If\n NULL and fetching, the default is \"fetch \n\", where \n is\n the name of the remote (or its url, for in-memory remotes). This\n parameter is ignored when pushing." + } + ], + "argline": "git_remote *remote, const git_remote_callbacks *callbacks, int update_fetchhead, git_remote_autotag_option_t download_tags, const char *reflog_message", + "sig": "git_remote *::const git_remote_callbacks *::int::git_remote_autotag_option_t::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Update the tips to the new state

\n", + "comments": "", + "group": "remote" + }, + "git_remote_fetch": { + "type": "function", + "file": "remote.h", + "line": 697, + "lineto": 701, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote to fetch from" + }, + { + "name": "refspecs", + "type": "const git_strarray *", + "comment": "the refspecs to use for this fetch. Pass NULL or an\n empty array to use the base refspecs." + }, + { + "name": "opts", + "type": "const git_fetch_options *", + "comment": "options to use for this fetch" + }, + { + "name": "reflog_message", + "type": "const char *", + "comment": "The message to insert into the reflogs. If NULL, the\n\t\t\t\t\t\t\t\t default is \"fetch\"" + } + ], + "argline": "git_remote *remote, const git_strarray *refspecs, const git_fetch_options *opts, const char *reflog_message", + "sig": "git_remote *::const git_strarray *::const git_fetch_options *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Download new data and update tips

\n", + "comments": "

Convenience function to connect to a remote, download the data, disconnect and update the remote-tracking branches.

\n", + "group": "remote", + "examples": { + "network/fetch.c": [ + "ex/HEAD/network/fetch.html#git_remote_fetch-8" + ] + } + }, + "git_remote_prune": { + "type": "function", + "file": "remote.h", + "line": 710, + "lineto": 710, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote to prune" + }, + { + "name": "callbacks", + "type": "const git_remote_callbacks *", + "comment": "callbacks to use for this prune" + } + ], + "argline": "git_remote *remote, const git_remote_callbacks *callbacks", + "sig": "git_remote *::const git_remote_callbacks *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Prune tracking refs that are no longer present on remote

\n", + "comments": "", + "group": "remote" + }, + "git_remote_push": { + "type": "function", + "file": "remote.h", + "line": 722, + "lineto": 724, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote to push to" + }, + { + "name": "refspecs", + "type": "const git_strarray *", + "comment": "the refspecs to use for pushing. If none are\n passed, the configured refspecs will be used" + }, + { + "name": "opts", + "type": "const git_push_options *", + "comment": "options to use for this push" + } + ], + "argline": "git_remote *remote, const git_strarray *refspecs, const git_push_options *opts", + "sig": "git_remote *::const git_strarray *::const git_push_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Perform a push

\n", + "comments": "

Perform all the steps from a push.

\n", + "group": "remote" + }, + "git_remote_stats": { + "type": "function", + "file": "remote.h", + "line": 729, + "lineto": 729, + "args": [ + { + "name": "remote", + "type": "git_remote *", + "comment": null + } + ], + "argline": "git_remote *remote", + "sig": "git_remote *", + "return": { + "type": "const git_transfer_progress *", + "comment": null + }, + "description": "

Get the statistics structure that is filled in by the fetch operation.

\n", + "comments": "", + "group": "remote", + "examples": { + "network/fetch.c": [ + "ex/HEAD/network/fetch.html#git_remote_stats-9" + ] + } + }, + "git_remote_autotag": { + "type": "function", + "file": "remote.h", + "line": 737, + "lineto": 737, + "args": [ + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote to query" + } + ], + "argline": "const git_remote *remote", + "sig": "const git_remote *", + "return": { + "type": "git_remote_autotag_option_t", + "comment": " the auto-follow setting" + }, + "description": "

Retrieve the tag auto-follow setting

\n", + "comments": "", + "group": "remote" + }, + "git_remote_set_autotag": { + "type": "function", + "file": "remote.h", + "line": 749, + "lineto": 749, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to make the change" + }, + { + "name": "remote", + "type": "const char *", + "comment": "the name of the remote" + }, + { + "name": "value", + "type": "git_remote_autotag_option_t", + "comment": "the new value to take." + } + ], + "argline": "git_repository *repo, const char *remote, git_remote_autotag_option_t value", + "sig": "git_repository *::const char *::git_remote_autotag_option_t", + "return": { + "type": "int", + "comment": null + }, + "description": "

Set the remote's tag following setting.

\n", + "comments": "

The change will be made in the configuration. No loaded remotes will be affected.

\n", + "group": "remote" + }, + "git_remote_prune_refs": { + "type": "function", + "file": "remote.h", + "line": 756, + "lineto": 756, + "args": [ + { + "name": "remote", + "type": "const git_remote *", + "comment": "the remote to query" + } + ], + "argline": "const git_remote *remote", + "sig": "const git_remote *", + "return": { + "type": "int", + "comment": " the ref-prune setting" + }, + "description": "

Retrieve the ref-prune setting

\n", + "comments": "", + "group": "remote" + }, + "git_remote_rename": { + "type": "function", + "file": "remote.h", + "line": 778, + "lineto": 782, + "args": [ + { + "name": "problems", + "type": "git_strarray *", + "comment": "non-default refspecs cannot be renamed and will be\n stored here for further processing by the caller. Always free this\n strarray on successful return." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to rename" + }, + { + "name": "name", + "type": "const char *", + "comment": "the current name of the remote" + }, + { + "name": "new_name", + "type": "const char *", + "comment": "the new name the remote should bear" + } + ], + "argline": "git_strarray *problems, git_repository *repo, const char *name, const char *new_name", + "sig": "git_strarray *::git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0, GIT_EINVALIDSPEC, GIT_EEXISTS or an error code" + }, + "description": "

Give the remote a new name

\n", + "comments": "

All remote-tracking branches and configuration settings for the remote are updated.

\n\n

The new name will be checked for validity. See git_tag_create() for rules about valid names.

\n\n

No loaded instances of the remote with the old name will change their name or their list of refspecs.

\n", + "group": "remote", + "examples": { + "remote.c": [ + "ex/HEAD/remote.html#git_remote_rename-12" + ] + } + }, + "git_remote_is_valid_name": { + "type": "function", + "file": "remote.h", + "line": 790, + "lineto": 790, + "args": [ + { + "name": "remote_name", + "type": "const char *", + "comment": "name to be checked." + } + ], + "argline": "const char *remote_name", + "sig": "const char *", + "return": { + "type": "int", + "comment": " 1 if the reference name is acceptable; 0 if it isn't" + }, + "description": "

Ensure the remote name is well-formed.

\n", + "comments": "", + "group": "remote" + }, + "git_remote_delete": { + "type": "function", + "file": "remote.h", + "line": 802, + "lineto": 802, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to act" + }, + { + "name": "name", + "type": "const char *", + "comment": "the name of the remove to delete" + } + ], + "argline": "git_repository *repo, const char *name", + "sig": "git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, or an error code." + }, + "description": "

Delete an existing persisted remote.

\n", + "comments": "

All remote-tracking branches and configuration settings for the remote will be removed.

\n", + "group": "remote", + "examples": { + "remote.c": [ + "ex/HEAD/remote.html#git_remote_delete-13" + ] + } + }, + "git_remote_default_branch": { + "type": "function", + "file": "remote.h", + "line": 820, + "lineto": 820, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "the buffern in which to store the reference name" + }, + { + "name": "remote", + "type": "git_remote *", + "comment": "the remote" + } + ], + "argline": "git_buf *out, git_remote *remote", + "sig": "git_buf *::git_remote *", + "return": { + "type": "int", + "comment": " 0, GIT_ENOTFOUND if the remote does not have any references\n or none of them point to HEAD's commit, or an error message." + }, + "description": "

Retrieve the name of the remote's default branch

\n", + "comments": "

The default branch of a repository is the branch which HEAD points to. If the remote does not support reporting this information directly, it performs the guess as git does; that is, if there are multiple branches which point to the same commit, the first one is chosen. If the master branch is a candidate, it wins.

\n\n

This function must only be called after connecting.

\n", + "group": "remote" + }, + "git_repository_open": { + "type": "function", + "file": "repository.h", + "line": 37, + "lineto": 37, + "args": [ + { + "name": "out", + "type": "git_repository **", + "comment": "pointer to the repo which will be opened" + }, + { + "name": "path", + "type": "const char *", + "comment": "the path to the repository" + } + ], + "argline": "git_repository **out, const char *path", + "sig": "git_repository **::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Open a git repository.

\n", + "comments": "

The 'path' argument must point to either a git repository folder, or an existing work dir.

\n\n

The method will automatically detect if 'path' is a normal or bare repository or fail if 'path' is neither.

\n", + "group": "repository", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_repository_open-58" + ], + "network/git2.c": [ + "ex/HEAD/network/git2.html#git_repository_open-5" + ], + "remote.c": [ + "ex/HEAD/remote.html#git_repository_open-14" + ] + } + }, + "git_repository_wrap_odb": { + "type": "function", + "file": "repository.h", + "line": 50, + "lineto": 50, + "args": [ + { + "name": "out", + "type": "git_repository **", + "comment": "pointer to the repo" + }, + { + "name": "odb", + "type": "git_odb *", + "comment": "the object database to wrap" + } + ], + "argline": "git_repository **out, git_odb *odb", + "sig": "git_repository **::git_odb *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a "fake" repository to wrap an object database

\n", + "comments": "

Create a repository object to wrap an object database to be used with the API when all you have is an object database. This doesn't have any paths associated with it, so use with care.

\n", + "group": "repository" + }, + "git_repository_discover": { + "type": "function", + "file": "repository.h", + "line": 78, + "lineto": 82, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "A pointer to a user-allocated git_buf which will contain\n the found path." + }, + { + "name": "start_path", + "type": "const char *", + "comment": "The base path where the lookup starts." + }, + { + "name": "across_fs", + "type": "int", + "comment": "If true, then the lookup will not stop when a\n filesystem device change is detected while exploring parent directories." + }, + { + "name": "ceiling_dirs", + "type": "const char *", + "comment": "A GIT_PATH_LIST_SEPARATOR separated list of\n absolute symbolic link free paths. The lookup will stop when any\n of this paths is reached. Note that the lookup always performs on\n start_path no matter start_path appears in ceiling_dirs ceiling_dirs\n might be NULL (which is equivalent to an empty string)" + } + ], + "argline": "git_buf *out, const char *start_path, int across_fs, const char *ceiling_dirs", + "sig": "git_buf *::const char *::int::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Look for a git repository and copy its path in the given buffer.\n The lookup starts from base_path and walks across parent directories\n if nothing has been found. The lookup ends when the first repository\n is found, or when reaching a directory referenced in ceiling_dirs\n or when the filesystem changes (in case across_fs is true).

\n", + "comments": "

The method will automatically detect if the repository is bare (if there is a repository).

\n", + "group": "repository", + "examples": { + "remote.c": [ + "ex/HEAD/remote.html#git_repository_discover-15" + ] + } + }, + "git_repository_open_ext": { + "type": "function", + "file": "repository.h", + "line": 122, + "lineto": 126, + "args": [ + { + "name": "out", + "type": "git_repository **", + "comment": "Pointer to the repo which will be opened. This can\n actually be NULL if you only want to use the error code to\n see if a repo at this path could be opened." + }, + { + "name": "path", + "type": "const char *", + "comment": "Path to open as git repository. If the flags\n permit \"searching\", then this can be a path to a subdirectory\n inside the working directory of the repository." + }, + { + "name": "flags", + "type": "unsigned int", + "comment": "A combination of the GIT_REPOSITORY_OPEN flags above." + }, + { + "name": "ceiling_dirs", + "type": "const char *", + "comment": "A GIT_PATH_LIST_SEPARATOR delimited list of path\n prefixes at which the search for a containing repository should\n terminate." + } + ], + "argline": "git_repository **out, const char *path, unsigned int flags, const char *ceiling_dirs", + "sig": "git_repository **::const char *::unsigned int::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if no repository could be found,\n or -1 if there was a repository but open failed for some reason\n (such as repo corruption or system errors)." + }, + "description": "

Find and open a repository with extended controls.

\n", + "comments": "", + "group": "repository", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_repository_open_ext-24" + ], + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_repository_open_ext-31" + ], + "describe.c": [ + "ex/HEAD/describe.html#git_repository_open_ext-6" + ], + "diff.c": [ + "ex/HEAD/diff.html#git_repository_open_ext-15" + ], + "log.c": [ + "ex/HEAD/log.html#git_repository_open_ext-44", + "ex/HEAD/log.html#git_repository_open_ext-45" + ], + "rev-parse.c": [ + "ex/HEAD/rev-parse.html#git_repository_open_ext-16" + ], + "status.c": [ + "ex/HEAD/status.html#git_repository_open_ext-5" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_repository_open_ext-11" + ] + } + }, + "git_repository_open_bare": { + "type": "function", + "file": "repository.h", + "line": 139, + "lineto": 139, + "args": [ + { + "name": "out", + "type": "git_repository **", + "comment": "Pointer to the repo which will be opened." + }, + { + "name": "bare_path", + "type": "const char *", + "comment": "Direct path to the bare repository" + } + ], + "argline": "git_repository **out, const char *bare_path", + "sig": "git_repository **::const char *", + "return": { + "type": "int", + "comment": " 0 on success, or an error code" + }, + "description": "

Open a bare repository on the serverside.

\n", + "comments": "

This is a fast open for bare repositories that will come in handy if you're e.g. hosting git repositories and need to access them efficiently

\n", + "group": "repository" + }, + "git_repository_free": { + "type": "function", + "file": "repository.h", + "line": 152, + "lineto": 152, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "repository handle to close. If NULL nothing occurs." + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a previously allocated repository

\n", + "comments": "

Note that after a repository is free'd, all the objects it has spawned will still exist until they are manually closed by the user with git_object_free, but accessing any of the attributes of an object without a backing repository will result in undefined behavior

\n", + "group": "repository", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_repository_free-25" + ], + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_repository_free-32" + ], + "describe.c": [ + "ex/HEAD/describe.html#git_repository_free-7" + ], + "diff.c": [ + "ex/HEAD/diff.html#git_repository_free-16" + ], + "general.c": [ + "ex/HEAD/general.html#git_repository_free-59" + ], + "init.c": [ + "ex/HEAD/init.html#git_repository_free-6" + ], + "log.c": [ + "ex/HEAD/log.html#git_repository_free-46" + ], + "network/clone.c": [ + "ex/HEAD/network/clone.html#git_repository_free-3" + ], + "network/git2.c": [ + "ex/HEAD/network/git2.html#git_repository_free-6" + ], + "rev-parse.c": [ + "ex/HEAD/rev-parse.html#git_repository_free-17" + ], + "status.c": [ + "ex/HEAD/status.html#git_repository_free-6" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_repository_free-12" + ] + } + }, + "git_repository_init": { + "type": "function", + "file": "repository.h", + "line": 169, + "lineto": 172, + "args": [ + { + "name": "out", + "type": "git_repository **", + "comment": "pointer to the repo which will be created or reinitialized" + }, + { + "name": "path", + "type": "const char *", + "comment": "the path to the repository" + }, + { + "name": "is_bare", + "type": "unsigned int", + "comment": "if true, a Git repository without a working directory is\n\t\tcreated at the pointed path. If false, provided path will be\n\t\tconsidered as the working directory into which the .git directory\n\t\twill be created." + } + ], + "argline": "git_repository **out, const char *path, unsigned int is_bare", + "sig": "git_repository **::const char *::unsigned int", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Creates a new Git repository in the given folder.

\n", + "comments": "

TODO: - Reinit the repository

\n", + "group": "repository", + "examples": { + "init.c": [ + "ex/HEAD/init.html#git_repository_init-7" + ] + } + }, + "git_repository_init_init_options": { + "type": "function", + "file": "repository.h", + "line": 281, + "lineto": 283, + "args": [ + { + "name": "opts", + "type": "git_repository_init_options *", + "comment": "the `git_repository_init_options` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_REPOSITORY_INIT_OPTIONS_VERSION`" + } + ], + "argline": "git_repository_init_options *opts, unsigned int version", + "sig": "git_repository_init_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_repository_init_options with default values. Equivalent\n to creating an instance with GIT_REPOSITORY_INIT_OPTIONS_INIT.

\n", + "comments": "", + "group": "repository" + }, + "git_repository_init_ext": { + "type": "function", + "file": "repository.h", + "line": 298, + "lineto": 301, + "args": [ + { + "name": "out", + "type": "git_repository **", + "comment": "Pointer to the repo which will be created or reinitialized." + }, + { + "name": "repo_path", + "type": "const char *", + "comment": "The path to the repository." + }, + { + "name": "opts", + "type": "git_repository_init_options *", + "comment": "Pointer to git_repository_init_options struct." + } + ], + "argline": "git_repository **out, const char *repo_path, git_repository_init_options *opts", + "sig": "git_repository **::const char *::git_repository_init_options *", + "return": { + "type": "int", + "comment": " 0 or an error code on failure." + }, + "description": "

Create a new Git repository in the given folder with extended controls.

\n", + "comments": "

This will initialize a new git repository (creating the repo_path if requested by flags) and working directory as needed. It will auto-detect the case sensitivity of the file system and if the file system supports file mode bits correctly.

\n", + "group": "repository", + "examples": { + "init.c": [ + "ex/HEAD/init.html#git_repository_init_ext-8" + ] + } + }, + "git_repository_head": { + "type": "function", + "file": "repository.h", + "line": 316, + "lineto": 316, + "args": [ + { + "name": "out", + "type": "git_reference **", + "comment": "pointer to the reference which will be retrieved" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "a repository object" + } + ], + "argline": "git_reference **out, git_repository *repo", + "sig": "git_reference **::git_repository *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EUNBORNBRANCH when HEAD points to a non existing\n branch, GIT_ENOTFOUND when HEAD is missing; an error code otherwise" + }, + "description": "

Retrieve and resolve the reference pointed at by HEAD.

\n", + "comments": "

The returned git_reference will be owned by caller and git_reference_free() must be called when done with it to release the allocated memory and prevent a leak.

\n", + "group": "repository", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_repository_head-7" + ] + } + }, + "git_repository_head_detached": { + "type": "function", + "file": "repository.h", + "line": 328, + "lineto": 328, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repo to test" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": " 1 if HEAD is detached, 0 if it's not; error code if there\n was an error." + }, + "description": "

Check if a repository's HEAD is detached

\n", + "comments": "

A repository's HEAD is detached when it points directly to a commit instead of a branch.

\n", + "group": "repository" + }, + "git_repository_head_unborn": { + "type": "function", + "file": "repository.h", + "line": 340, + "lineto": 340, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repo to test" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": " 1 if the current branch is unborn, 0 if it's not; error\n code if there was an error" + }, + "description": "

Check if the current branch is unborn

\n", + "comments": "

An unborn branch is one named from HEAD but which doesn't exist in the refs namespace, because it doesn't have any commit to point to.

\n", + "group": "repository" + }, + "git_repository_is_empty": { + "type": "function", + "file": "repository.h", + "line": 352, + "lineto": 352, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repo to test" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": " 1 if the repository is empty, 0 if it isn't, error code\n if the repository is corrupted" + }, + "description": "

Check if a repository is empty

\n", + "comments": "

An empty repository has just been initialized and contains no references apart from HEAD, which must be pointing to the unborn master branch.

\n", + "group": "repository" + }, + "git_repository_path": { + "type": "function", + "file": "repository.h", + "line": 363, + "lineto": 363, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "const char *", + "comment": " the path to the repository" + }, + "description": "

Get the path of this repository

\n", + "comments": "

This is the path of the .git folder for normal repositories, or of the repository itself for bare repositories.

\n", + "group": "repository", + "examples": { + "init.c": [ + "ex/HEAD/init.html#git_repository_path-9" + ], + "status.c": [ + "ex/HEAD/status.html#git_repository_path-8" + ] + } + }, + "git_repository_workdir": { + "type": "function", + "file": "repository.h", + "line": 374, + "lineto": 374, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "const char *", + "comment": " the path to the working dir, if it exists" + }, + "description": "

Get the path of the working directory for this repository

\n", + "comments": "

If the repository is bare, this function will always return NULL.

\n", + "group": "repository", + "examples": { + "init.c": [ + "ex/HEAD/init.html#git_repository_workdir-10" + ] + } + }, + "git_repository_set_workdir": { + "type": "function", + "file": "repository.h", + "line": 393, + "lineto": 394, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "workdir", + "type": "const char *", + "comment": "The path to a working directory" + }, + { + "name": "update_gitlink", + "type": "int", + "comment": "Create/update gitlink in workdir and set config\n \"core.worktree\" (if workdir is not the parent of the .git directory)" + } + ], + "argline": "git_repository *repo, const char *workdir, int update_gitlink", + "sig": "git_repository *::const char *::int", + "return": { + "type": "int", + "comment": " 0, or an error code" + }, + "description": "

Set the path to the working directory for this repository

\n", + "comments": "

The working directory doesn't need to be the same one that contains the .git folder for this repository.

\n\n

If this repository is bare, setting its working directory will turn it into a normal repository, capable of performing all the common workdir operations (checkout, status, index manipulation, etc).

\n", + "group": "repository" + }, + "git_repository_is_bare": { + "type": "function", + "file": "repository.h", + "line": 402, + "lineto": 402, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repo to test" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": " 1 if the repository is bare, 0 otherwise." + }, + "description": "

Check if a repository is bare

\n", + "comments": "", + "group": "repository", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_repository_is_bare-9" + ] + } + }, + "git_repository_config": { + "type": "function", + "file": "repository.h", + "line": 418, + "lineto": 418, + "args": [ + { + "name": "out", + "type": "git_config **", + "comment": "Pointer to store the loaded configuration" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + } + ], + "argline": "git_config **out, git_repository *repo", + "sig": "git_config **::git_repository *", + "return": { + "type": "int", + "comment": " 0, or an error code" + }, + "description": "

Get the configuration file for this repository.

\n", + "comments": "

If a configuration file has not been set, the default config set for the repository will be returned, including global and system configurations (if they are available).

\n\n

The configuration file must be freed once it's no longer being used by the user.

\n", + "group": "repository" + }, + "git_repository_config_snapshot": { + "type": "function", + "file": "repository.h", + "line": 434, + "lineto": 434, + "args": [ + { + "name": "out", + "type": "git_config **", + "comment": "Pointer to store the loaded configuration" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository" + } + ], + "argline": "git_config **out, git_repository *repo", + "sig": "git_config **::git_repository *", + "return": { + "type": "int", + "comment": " 0, or an error code" + }, + "description": "

Get a snapshot of the repository's configuration

\n", + "comments": "

Convenience function to take a snapshot from the repository's configuration. The contents of this snapshot will not change, even if the underlying config files are modified.

\n\n

The configuration file must be freed once it's no longer being used by the user.

\n", + "group": "repository" + }, + "git_repository_odb": { + "type": "function", + "file": "repository.h", + "line": 450, + "lineto": 450, + "args": [ + { + "name": "out", + "type": "git_odb **", + "comment": "Pointer to store the loaded ODB" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + } + ], + "argline": "git_odb **out, git_repository *repo", + "sig": "git_odb **::git_repository *", + "return": { + "type": "int", + "comment": " 0, or an error code" + }, + "description": "

Get the Object Database for this repository.

\n", + "comments": "

If a custom ODB has not been set, the default database for the repository will be returned (the one located in .git/objects).

\n\n

The ODB must be freed once it's no longer being used by the user.

\n", + "group": "repository", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_repository_odb-33" + ], + "general.c": [ + "ex/HEAD/general.html#git_repository_odb-60" + ] + } + }, + "git_repository_refdb": { + "type": "function", + "file": "repository.h", + "line": 466, + "lineto": 466, + "args": [ + { + "name": "out", + "type": "git_refdb **", + "comment": "Pointer to store the loaded refdb" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + } + ], + "argline": "git_refdb **out, git_repository *repo", + "sig": "git_refdb **::git_repository *", + "return": { + "type": "int", + "comment": " 0, or an error code" + }, + "description": "

Get the Reference Database Backend for this repository.

\n", + "comments": "

If a custom refsdb has not been set, the default database for the repository will be returned (the one that manipulates loose and packed references in the .git directory).

\n\n

The refdb must be freed once it's no longer being used by the user.

\n", + "group": "repository" + }, + "git_repository_index": { + "type": "function", + "file": "repository.h", + "line": 482, + "lineto": 482, + "args": [ + { + "name": "out", + "type": "git_index **", + "comment": "Pointer to store the loaded index" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + } + ], + "argline": "git_index **out, git_repository *repo", + "sig": "git_index **::git_repository *", + "return": { + "type": "int", + "comment": " 0, or an error code" + }, + "description": "

Get the Index file for this repository.

\n", + "comments": "

If a custom index has not been set, the default index for the repository will be returned (the one located in .git/index).

\n\n

The index must be freed once it's no longer being used by the user.

\n", + "group": "repository", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_repository_index-61" + ], + "init.c": [ + "ex/HEAD/init.html#git_repository_index-11" + ] + } + }, + "git_repository_message": { + "type": "function", + "file": "repository.h", + "line": 500, + "lineto": 500, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "git_buf to write data into" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository to read prepared message from" + } + ], + "argline": "git_buf *out, git_repository *repo", + "sig": "git_buf *::git_repository *", + "return": { + "type": "int", + "comment": " 0, GIT_ENOTFOUND if no message exists or an error code" + }, + "description": "

Retrieve git's prepared message

\n", + "comments": "

Operations such as git revert/cherry-pick/merge with the -n option stop just short of creating a commit with the changes and save their prepared message in .git/MERGE_MSG so the next git-commit execution can present it to the user for them to amend if they wish.

\n\n

Use this function to get the contents of this file. Don't forget to remove the file after you create the commit.

\n", + "group": "repository" + }, + "git_repository_message_remove": { + "type": "function", + "file": "repository.h", + "line": 507, + "lineto": 507, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": null + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Remove git's prepared message.

\n", + "comments": "

Remove the message that git_repository_message retrieves.

\n", + "group": "repository" + }, + "git_repository_state_cleanup": { + "type": "function", + "file": "repository.h", + "line": 516, + "lineto": 516, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": " 0 on success, or error" + }, + "description": "

Remove all the metadata associated with an ongoing command like merge,\n revert, cherry-pick, etc. For example: MERGE_HEAD, MERGE_MSG, etc.

\n", + "comments": "", + "group": "repository" + }, + "git_repository_fetchhead_foreach": { + "type": "function", + "file": "repository.h", + "line": 535, + "lineto": 538, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "callback", + "type": "git_repository_fetchhead_foreach_cb", + "comment": "Callback function" + }, + { + "name": "payload", + "type": "void *", + "comment": "Pointer to callback data (optional)" + } + ], + "argline": "git_repository *repo, git_repository_fetchhead_foreach_cb callback, void *payload", + "sig": "git_repository *::git_repository_fetchhead_foreach_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, GIT_ENOTFOUND if\n there is no FETCH_HEAD file, or other error code." + }, + "description": "

Invoke 'callback' for each entry in the given FETCH_HEAD file.

\n", + "comments": "

Return a non-zero value from the callback to stop the loop.

\n", + "group": "repository" + }, + "git_repository_mergehead_foreach": { + "type": "function", + "file": "repository.h", + "line": 555, + "lineto": 558, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "callback", + "type": "git_repository_mergehead_foreach_cb", + "comment": "Callback function" + }, + { + "name": "payload", + "type": "void *", + "comment": "Pointer to callback data (optional)" + } + ], + "argline": "git_repository *repo, git_repository_mergehead_foreach_cb callback, void *payload", + "sig": "git_repository *::git_repository_mergehead_foreach_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, GIT_ENOTFOUND if\n there is no MERGE_HEAD file, or other error code." + }, + "description": "

If a merge is in progress, invoke 'callback' for each commit ID in the\n MERGE_HEAD file.

\n", + "comments": "

Return a non-zero value from the callback to stop the loop.

\n", + "group": "repository" + }, + "git_repository_hashfile": { + "type": "function", + "file": "repository.h", + "line": 583, + "lineto": 588, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "Output value of calculated SHA" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository pointer" + }, + { + "name": "path", + "type": "const char *", + "comment": "Path to file on disk whose contents should be hashed. If the\n repository is not NULL, this can be a relative path." + }, + { + "name": "type", + "type": "git_otype", + "comment": "The object type to hash as (e.g. GIT_OBJ_BLOB)" + }, + { + "name": "as_path", + "type": "const char *", + "comment": "The path to use to look up filtering rules. If this is\n NULL, then the `path` parameter will be used instead. If\n this is passed as the empty string, then no filters will be\n applied when calculating the hash." + } + ], + "argline": "git_oid *out, git_repository *repo, const char *path, git_otype type, const char *as_path", + "sig": "git_oid *::git_repository *::const char *::git_otype::const char *", + "return": { + "type": "int", + "comment": " 0 on success, or an error code" + }, + "description": "

Calculate hash of file using repository filtering rules.

\n", + "comments": "

If you simply want to calculate the hash of a file on disk with no filters, you can just use the git_odb_hashfile() API. However, if you want to hash a file in the repository and you want to apply filtering rules (e.g. crlf filters) before generating the SHA, then use this function.

\n\n

Note: if the repository has core.safecrlf set to fail and the filtering triggers that failure, then this function will return an error and not calculate the hash of the file.

\n", + "group": "repository" + }, + "git_repository_set_head": { + "type": "function", + "file": "repository.h", + "line": 608, + "lineto": 610, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository pointer" + }, + { + "name": "refname", + "type": "const char *", + "comment": "Canonical name of the reference the HEAD should point at" + } + ], + "argline": "git_repository *repo, const char *refname", + "sig": "git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, or an error code" + }, + "description": "

Make the repository HEAD point to the specified reference.

\n", + "comments": "

If the provided reference points to a Tree or a Blob, the HEAD is unaltered and -1 is returned.

\n\n

If the provided reference points to a branch, the HEAD will point to that branch, staying attached, or become attached if it isn't yet. If the branch doesn't exist yet, no error will be return. The HEAD will then be attached to an unborn branch.

\n\n

Otherwise, the HEAD will be detached and will directly point to the Commit.

\n", + "group": "repository" + }, + "git_repository_set_head_detached": { + "type": "function", + "file": "repository.h", + "line": 628, + "lineto": 630, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository pointer" + }, + { + "name": "commitish", + "type": "const git_oid *", + "comment": "Object id of the Commit the HEAD should point to" + } + ], + "argline": "git_repository *repo, const git_oid *commitish", + "sig": "git_repository *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 on success, or an error code" + }, + "description": "

Make the repository HEAD directly point to the Commit.

\n", + "comments": "

If the provided committish cannot be found in the repository, the HEAD is unaltered and GIT_ENOTFOUND is returned.

\n\n

If the provided commitish cannot be peeled into a commit, the HEAD is unaltered and -1 is returned.

\n\n

Otherwise, the HEAD will eventually be detached and will directly point to the peeled Commit.

\n", + "group": "repository" + }, + "git_repository_set_head_detached_from_annotated": { + "type": "function", + "file": "repository.h", + "line": 644, + "lineto": 646, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": null + }, + { + "name": "commitish", + "type": "const git_annotated_commit *", + "comment": null + } + ], + "argline": "git_repository *repo, const git_annotated_commit *commitish", + "sig": "git_repository *::const git_annotated_commit *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Make the repository HEAD directly point to the Commit.

\n", + "comments": "

This behaves like git_repository_set_head_detached() but takes an annotated commit, which lets you specify which extended sha syntax string was specified by a user, allowing for more exact reflog messages.

\n\n

See the documentation for git_repository_set_head_detached().

\n", + "group": "repository" + }, + "git_repository_detach_head": { + "type": "function", + "file": "repository.h", + "line": 665, + "lineto": 666, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository pointer" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EUNBORNBRANCH when HEAD points to a non existing\n branch or an error code" + }, + "description": "

Detach the HEAD.

\n", + "comments": "

If the HEAD is already detached and points to a Commit, 0 is returned.

\n\n

If the HEAD is already detached and points to a Tag, the HEAD is updated into making it point to the peeled Commit, and 0 is returned.

\n\n

If the HEAD is already detached and points to a non commitish, the HEAD is unaltered, and -1 is returned.

\n\n

Otherwise, the HEAD will be detached and point to the peeled Commit.

\n", + "group": "repository" + }, + "git_repository_state": { + "type": "function", + "file": "repository.h", + "line": 696, + "lineto": 696, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository pointer" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": " The state of the repository" + }, + "description": "

Determines the status of a git repository - ie, whether an operation\n (merge, cherry-pick, etc) is in progress.

\n", + "comments": "", + "group": "repository" + }, + "git_repository_set_namespace": { + "type": "function", + "file": "repository.h", + "line": 710, + "lineto": 710, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "The repo" + }, + { + "name": "nmspace", + "type": "const char *", + "comment": "The namespace. This should not include the refs\n\tfolder, e.g. to namespace all references under `refs/namespaces/foo/`,\n\tuse `foo` as the namespace." + } + ], + "argline": "git_repository *repo, const char *nmspace", + "sig": "git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, -1 on error" + }, + "description": "

Sets the active namespace for this Git Repository

\n", + "comments": "

This namespace affects all reference operations for the repo. See man gitnamespaces

\n", + "group": "repository" + }, + "git_repository_get_namespace": { + "type": "function", + "file": "repository.h", + "line": 718, + "lineto": 718, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "The repo" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "const char *", + "comment": " the active namespace, or NULL if there isn't one" + }, + "description": "

Get the currently active namespace for this repository

\n", + "comments": "", + "group": "repository" + }, + "git_repository_is_shallow": { + "type": "function", + "file": "repository.h", + "line": 727, + "lineto": 727, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": " 1 if shallow, zero if not" + }, + "description": "

Determine if the repository was a shallow clone

\n", + "comments": "", + "group": "repository" + }, + "git_repository_ident": { + "type": "function", + "file": "repository.h", + "line": 739, + "lineto": 739, + "args": [ + { + "name": "name", + "type": "const char **", + "comment": "where to store the pointer to the name" + }, + { + "name": "email", + "type": "const char **", + "comment": "where to store the pointer to the email" + }, + { + "name": "repo", + "type": "const git_repository *", + "comment": "the repository" + } + ], + "argline": "const char **name, const char **email, const git_repository *repo", + "sig": "const char **::const char **::const git_repository *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Retrieve the configured identity to use for reflogs

\n", + "comments": "

The memory is owned by the repository and must not be freed by the user.

\n", + "group": "repository" + }, + "git_repository_set_ident": { + "type": "function", + "file": "repository.h", + "line": 752, + "lineto": 752, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to configure" + }, + { + "name": "name", + "type": "const char *", + "comment": "the name to use for the reflog entries" + }, + { + "name": "email", + "type": "const char *", + "comment": "the email to use for the reflog entries" + } + ], + "argline": "git_repository *repo, const char *name, const char *email", + "sig": "git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Set the identity to be used for writing reflogs

\n", + "comments": "

If both are set, this name and email will be used to write to the reflog. Pass NULL to unset. When unset, the identity will be taken from the repository's configuration.

\n", + "group": "repository" + }, + "git_reset": { + "type": "function", + "file": "reset.h", + "line": 62, + "lineto": 66, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to perform the reset operation." + }, + { + "name": "target", + "type": "git_object *", + "comment": "Committish to which the Head should be moved to. This object\n must belong to the given `repo` and can either be a git_commit or a\n git_tag. When a git_tag is being passed, it should be dereferencable\n to a git_commit which oid will be used as the target of the branch." + }, + { + "name": "reset_type", + "type": "git_reset_t", + "comment": "Kind of reset operation to perform." + }, + { + "name": "checkout_opts", + "type": "const git_checkout_options *", + "comment": "Checkout options to be used for a HARD reset.\n The checkout_strategy field will be overridden (based on reset_type).\n This parameter can be used to propagate notify and progress callbacks." + } + ], + "argline": "git_repository *repo, git_object *target, git_reset_t reset_type, const git_checkout_options *checkout_opts", + "sig": "git_repository *::git_object *::git_reset_t::const git_checkout_options *", + "return": { + "type": "int", + "comment": " 0 on success or an error code" + }, + "description": "

Sets the current head to the specified commit oid and optionally\n resets the index and working tree to match.

\n", + "comments": "

SOFT reset means the Head will be moved to the commit.

\n\n

MIXED reset will trigger a SOFT reset, plus the index will be replaced with the content of the commit tree.

\n\n

HARD reset will trigger a MIXED reset and the working directory will be replaced with the content of the index. (Untracked and ignored files will be left alone, however.)

\n\n

TODO: Implement remaining kinds of resets.

\n", + "group": "reset" + }, + "git_reset_from_annotated": { + "type": "function", + "file": "reset.h", + "line": 80, + "lineto": 84, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": null + }, + { + "name": "commit", + "type": "git_annotated_commit *", + "comment": null + }, + { + "name": "reset_type", + "type": "git_reset_t", + "comment": null + }, + { + "name": "checkout_opts", + "type": "const git_checkout_options *", + "comment": null + } + ], + "argline": "git_repository *repo, git_annotated_commit *commit, git_reset_t reset_type, const git_checkout_options *checkout_opts", + "sig": "git_repository *::git_annotated_commit *::git_reset_t::const git_checkout_options *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Sets the current head to the specified commit oid and optionally\n resets the index and working tree to match.

\n", + "comments": "

This behaves like git_reset() but takes an annotated commit, which lets you specify which extended sha syntax string was specified by a user, allowing for more exact reflog messages.

\n\n

See the documentation for git_reset().

\n", + "group": "reset" + }, + "git_reset_default": { + "type": "function", + "file": "reset.h", + "line": 104, + "lineto": 107, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to perform the reset operation." + }, + { + "name": "target", + "type": "git_object *", + "comment": "The committish which content will be used to reset the content\n of the index." + }, + { + "name": "pathspecs", + "type": "git_strarray *", + "comment": "List of pathspecs to operate on." + } + ], + "argline": "git_repository *repo, git_object *target, git_strarray *pathspecs", + "sig": "git_repository *::git_object *::git_strarray *", + "return": { + "type": "int", + "comment": " 0 on success or an error code \n<\n 0" + }, + "description": "

Updates some entries in the index from the target commit tree.

\n", + "comments": "

The scope of the updated entries is determined by the paths being passed in the pathspec parameters.

\n\n

Passing a NULL target will result in removing entries in the index matching the provided pathspecs.

\n", + "group": "reset" + }, + "git_revert_init_options": { + "type": "function", + "file": "revert.h", + "line": 47, + "lineto": 49, + "args": [ + { + "name": "opts", + "type": "git_revert_options *", + "comment": "the `git_revert_options` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_REVERT_OPTIONS_VERSION`" + } + ], + "argline": "git_revert_options *opts, unsigned int version", + "sig": "git_revert_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_revert_options with default values. Equivalent to\n creating an instance with GIT_REVERT_OPTIONS_INIT.

\n", + "comments": "", + "group": "revert" + }, + "git_revert_commit": { + "type": "function", + "file": "revert.h", + "line": 65, + "lineto": 71, + "args": [ + { + "name": "out", + "type": "git_index **", + "comment": "pointer to store the index result in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository that contains the given commits" + }, + { + "name": "revert_commit", + "type": "git_commit *", + "comment": "the commit to revert" + }, + { + "name": "our_commit", + "type": "git_commit *", + "comment": "the commit to revert against (eg, HEAD)" + }, + { + "name": "mainline", + "type": "unsigned int", + "comment": "the parent of the revert commit, if it is a merge" + }, + { + "name": "merge_options", + "type": "const git_merge_options *", + "comment": "the merge options (or null for defaults)" + } + ], + "argline": "git_index **out, git_repository *repo, git_commit *revert_commit, git_commit *our_commit, unsigned int mainline, const git_merge_options *merge_options", + "sig": "git_index **::git_repository *::git_commit *::git_commit *::unsigned int::const git_merge_options *", + "return": { + "type": "int", + "comment": " zero on success, -1 on failure." + }, + "description": "

Reverts the given commit against the given "our" commit, producing an\n index that reflects the result of the revert.

\n", + "comments": "

The returned index must be freed explicitly with git_index_free.

\n", + "group": "revert" + }, + "git_revert": { + "type": "function", + "file": "revert.h", + "line": 81, + "lineto": 84, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to revert" + }, + { + "name": "commit", + "type": "git_commit *", + "comment": "the commit to revert" + }, + { + "name": "given_opts", + "type": "const git_revert_options *", + "comment": "merge flags" + } + ], + "argline": "git_repository *repo, git_commit *commit, const git_revert_options *given_opts", + "sig": "git_repository *::git_commit *::const git_revert_options *", + "return": { + "type": "int", + "comment": " zero on success, -1 on failure." + }, + "description": "

Reverts the given commit, producing changes in the index and working directory.

\n", + "comments": "", + "group": "revert" + }, + "git_revparse_single": { + "type": "function", + "file": "revparse.h", + "line": 37, + "lineto": 38, + "args": [ + { + "name": "out", + "type": "git_object **", + "comment": "pointer to output object" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to search in" + }, + { + "name": "spec", + "type": "const char *", + "comment": "the textual specification for an object" + } + ], + "argline": "git_object **out, git_repository *repo, const char *spec", + "sig": "git_object **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND, GIT_EAMBIGUOUS, GIT_EINVALIDSPEC or an error code" + }, + "description": "

Find a single object, as specified by a revision string.

\n", + "comments": "

See man gitrevisions, or http://git-scm.com/docs/git-rev-parse.html#_specifying_revisions for information on the syntax accepted.

\n\n

The returned object should be released with git_object_free when no longer needed.

\n", + "group": "revparse", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_revparse_single-26" + ], + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_revparse_single-34" + ], + "describe.c": [ + "ex/HEAD/describe.html#git_revparse_single-8" + ], + "log.c": [ + "ex/HEAD/log.html#git_revparse_single-47" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_revparse_single-13", + "ex/HEAD/tag.html#git_revparse_single-14", + "ex/HEAD/tag.html#git_revparse_single-15", + "ex/HEAD/tag.html#git_revparse_single-16" + ] + } + }, + "git_revparse_ext": { + "type": "function", + "file": "revparse.h", + "line": 61, + "lineto": 65, + "args": [ + { + "name": "object_out", + "type": "git_object **", + "comment": "pointer to output object" + }, + { + "name": "reference_out", + "type": "git_reference **", + "comment": "pointer to output reference or NULL" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to search in" + }, + { + "name": "spec", + "type": "const char *", + "comment": "the textual specification for an object" + } + ], + "argline": "git_object **object_out, git_reference **reference_out, git_repository *repo, const char *spec", + "sig": "git_object **::git_reference **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND, GIT_EAMBIGUOUS, GIT_EINVALIDSPEC\n or an error code" + }, + "description": "

Find a single object and intermediate reference by a revision string.

\n", + "comments": "

See man gitrevisions, or http://git-scm.com/docs/git-rev-parse.html#_specifying_revisions for information on the syntax accepted.

\n\n

In some cases (@{<-n>} or <branchname>@{upstream}), the expression may point to an intermediate reference. When such expressions are being passed in, reference_out will be valued as well.

\n\n

The returned object should be released with git_object_free and the returned reference with git_reference_free when no longer needed.

\n", + "group": "revparse" + }, + "git_revparse": { + "type": "function", + "file": "revparse.h", + "line": 105, + "lineto": 108, + "args": [ + { + "name": "revspec", + "type": "git_revspec *", + "comment": "Pointer to an user-allocated git_revspec struct where\n\t the result of the rev-parse will be stored" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to search in" + }, + { + "name": "spec", + "type": "const char *", + "comment": "the rev-parse spec to parse" + } + ], + "argline": "git_revspec *revspec, git_repository *repo, const char *spec", + "sig": "git_revspec *::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_INVALIDSPEC, GIT_ENOTFOUND, GIT_EAMBIGUOUS or an error code" + }, + "description": "

Parse a revision string for from, to, and intent.

\n", + "comments": "

See man gitrevisions or http://git-scm.com/docs/git-rev-parse.html#_specifying_revisions for information on the syntax accepted.

\n", + "group": "revparse", + "examples": { + "blame.c": [ + "ex/HEAD/blame.html#git_revparse-27" + ], + "log.c": [ + "ex/HEAD/log.html#git_revparse-48" + ], + "rev-parse.c": [ + "ex/HEAD/rev-parse.html#git_revparse-18", + "ex/HEAD/rev-parse.html#git_revparse-19" + ] + } + }, + "git_revwalk_new": { + "type": "function", + "file": "revwalk.h", + "line": 75, + "lineto": 75, + "args": [ + { + "name": "out", + "type": "git_revwalk **", + "comment": "pointer to the new revision walker" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repo to walk through" + } + ], + "argline": "git_revwalk **out, git_repository *repo", + "sig": "git_revwalk **::git_repository *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Allocate a new revision walker to iterate through a repo.

\n", + "comments": "

This revision walker uses a custom memory pool and an internal commit cache, so it is relatively expensive to allocate.

\n\n

For maximum performance, this revision walker should be reused for different walks.

\n\n

This revision walker is not thread safe: it may only be used to walk a repository on a single thread; however, it is possible to have several revision walkers in several different threads walking the same repository.

\n", + "group": "revwalk", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_revwalk_new-62" + ], + "log.c": [ + "ex/HEAD/log.html#git_revwalk_new-49", + "ex/HEAD/log.html#git_revwalk_new-50" + ] + } + }, + "git_revwalk_reset": { + "type": "function", + "file": "revwalk.h", + "line": 90, + "lineto": 90, + "args": [ + { + "name": "walker", + "type": "git_revwalk *", + "comment": "handle to reset." + } + ], + "argline": "git_revwalk *walker", + "sig": "git_revwalk *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Reset the revision walker for reuse.

\n", + "comments": "

This will clear all the pushed and hidden commits, and leave the walker in a blank state (just like at creation) ready to receive new commit pushes and start a new walk.

\n\n

The revision walk is automatically reset when a walk is over.

\n", + "group": "revwalk" + }, + "git_revwalk_push": { + "type": "function", + "file": "revwalk.h", + "line": 109, + "lineto": 109, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker being used for the traversal." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "the oid of the commit to start from." + } + ], + "argline": "git_revwalk *walk, const git_oid *id", + "sig": "git_revwalk *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Add a new root for the traversal

\n", + "comments": "

The pushed commit will be marked as one of the roots from which to start the walk. This commit may not be walked if it or a child is hidden.

\n\n

At least one commit must be pushed onto the walker before a walk can be started.

\n\n

The given id must belong to a committish on the walked repository.

\n", + "group": "revwalk", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_revwalk_push-63" + ], + "log.c": [ + "ex/HEAD/log.html#git_revwalk_push-51" + ] + } + }, + "git_revwalk_push_glob": { + "type": "function", + "file": "revwalk.h", + "line": 127, + "lineto": 127, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker being used for the traversal" + }, + { + "name": "glob", + "type": "const char *", + "comment": "the glob pattern references should match" + } + ], + "argline": "git_revwalk *walk, const char *glob", + "sig": "git_revwalk *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Push matching references

\n", + "comments": "

The OIDs pointed to by the references that match the given glob pattern will be pushed to the revision walker.

\n\n

A leading 'refs/' is implied if not present as well as a trailing '/*' if the glob lacks '?', '*' or '['.

\n\n

Any references matching this glob which do not point to a committish will be ignored.

\n", + "group": "revwalk" + }, + "git_revwalk_push_head": { + "type": "function", + "file": "revwalk.h", + "line": 135, + "lineto": 135, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker being used for the traversal" + } + ], + "argline": "git_revwalk *walk", + "sig": "git_revwalk *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Push the repository's HEAD

\n", + "comments": "", + "group": "revwalk", + "examples": { + "log.c": [ + "ex/HEAD/log.html#git_revwalk_push_head-52" + ] + } + }, + "git_revwalk_hide": { + "type": "function", + "file": "revwalk.h", + "line": 150, + "lineto": 150, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker being used for the traversal." + }, + { + "name": "commit_id", + "type": "const git_oid *", + "comment": "the oid of commit that will be ignored during the traversal" + } + ], + "argline": "git_revwalk *walk, const git_oid *commit_id", + "sig": "git_revwalk *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Mark a commit (and its ancestors) uninteresting for the output.

\n", + "comments": "

The given id must belong to a committish on the walked repository.

\n\n

The resolved commit and all its parents will be hidden from the output on the revision walk.

\n", + "group": "revwalk", + "examples": { + "log.c": [ + "ex/HEAD/log.html#git_revwalk_hide-53" + ] + } + }, + "git_revwalk_hide_glob": { + "type": "function", + "file": "revwalk.h", + "line": 169, + "lineto": 169, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker being used for the traversal" + }, + { + "name": "glob", + "type": "const char *", + "comment": "the glob pattern references should match" + } + ], + "argline": "git_revwalk *walk, const char *glob", + "sig": "git_revwalk *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Hide matching references.

\n", + "comments": "

The OIDs pointed to by the references that match the given glob pattern and their ancestors will be hidden from the output on the revision walk.

\n\n

A leading 'refs/' is implied if not present as well as a trailing '/*' if the glob lacks '?', '*' or '['.

\n\n

Any references matching this glob which do not point to a committish will be ignored.

\n", + "group": "revwalk" + }, + "git_revwalk_hide_head": { + "type": "function", + "file": "revwalk.h", + "line": 177, + "lineto": 177, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker being used for the traversal" + } + ], + "argline": "git_revwalk *walk", + "sig": "git_revwalk *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Hide the repository's HEAD

\n", + "comments": "", + "group": "revwalk" + }, + "git_revwalk_push_ref": { + "type": "function", + "file": "revwalk.h", + "line": 188, + "lineto": 188, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker being used for the traversal" + }, + { + "name": "refname", + "type": "const char *", + "comment": "the reference to push" + } + ], + "argline": "git_revwalk *walk, const char *refname", + "sig": "git_revwalk *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Push the OID pointed to by a reference

\n", + "comments": "

The reference must point to a committish.

\n", + "group": "revwalk" + }, + "git_revwalk_hide_ref": { + "type": "function", + "file": "revwalk.h", + "line": 199, + "lineto": 199, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker being used for the traversal" + }, + { + "name": "refname", + "type": "const char *", + "comment": "the reference to hide" + } + ], + "argline": "git_revwalk *walk, const char *refname", + "sig": "git_revwalk *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Hide the OID pointed to by a reference

\n", + "comments": "

The reference must point to a committish.

\n", + "group": "revwalk" + }, + "git_revwalk_next": { + "type": "function", + "file": "revwalk.h", + "line": 219, + "lineto": 219, + "args": [ + { + "name": "out", + "type": "git_oid *", + "comment": "Pointer where to store the oid of the next commit" + }, + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker to pop the commit from." + } + ], + "argline": "git_oid *out, git_revwalk *walk", + "sig": "git_oid *::git_revwalk *", + "return": { + "type": "int", + "comment": " 0 if the next commit was found;\n\tGIT_ITEROVER if there are no commits left to iterate" + }, + "description": "

Get the next commit from the revision walk.

\n", + "comments": "

The initial call to this method is not blocking when iterating through a repo with a time-sorting mode.

\n\n

Iterating with Topological or inverted modes makes the initial call blocking to preprocess the commit list, but this block should be mostly unnoticeable on most repositories (topological preprocessing times at 0.3s on the git.git repo).

\n\n

The revision walker is reset when the walk is over.

\n", + "group": "revwalk", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_revwalk_next-64" + ], + "log.c": [ + "ex/HEAD/log.html#git_revwalk_next-54" + ] + } + }, + "git_revwalk_sorting": { + "type": "function", + "file": "revwalk.h", + "line": 230, + "lineto": 230, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker being used for the traversal." + }, + { + "name": "sort_mode", + "type": "unsigned int", + "comment": "combination of GIT_SORT_XXX flags" + } + ], + "argline": "git_revwalk *walk, unsigned int sort_mode", + "sig": "git_revwalk *::unsigned int", + "return": { + "type": "void", + "comment": null + }, + "description": "

Change the sorting mode when iterating through the\n repository's contents.

\n", + "comments": "

Changing the sorting mode resets the walker.

\n", + "group": "revwalk", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_revwalk_sorting-65" + ], + "log.c": [ + "ex/HEAD/log.html#git_revwalk_sorting-55", + "ex/HEAD/log.html#git_revwalk_sorting-56" + ] + } + }, + "git_revwalk_push_range": { + "type": "function", + "file": "revwalk.h", + "line": 245, + "lineto": 245, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the walker being used for the traversal" + }, + { + "name": "range", + "type": "const char *", + "comment": "the range" + } + ], + "argline": "git_revwalk *walk, const char *range", + "sig": "git_revwalk *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Push and hide the respective endpoints of the given range.

\n", + "comments": "

The range should be of the form .. where each is in the form accepted by 'git_revparse_single'. The left-hand commit will be hidden and the right-hand commit pushed.

\n", + "group": "revwalk" + }, + "git_revwalk_simplify_first_parent": { + "type": "function", + "file": "revwalk.h", + "line": 252, + "lineto": 252, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": null + } + ], + "argline": "git_revwalk *walk", + "sig": "git_revwalk *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Simplify the history by first-parent

\n", + "comments": "

No parents other than the first for each commit will be enqueued.

\n", + "group": "revwalk" + }, + "git_revwalk_free": { + "type": "function", + "file": "revwalk.h", + "line": 260, + "lineto": 260, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "traversal handle to close. If NULL nothing occurs." + } + ], + "argline": "git_revwalk *walk", + "sig": "git_revwalk *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a revision walker previously allocated.

\n", + "comments": "", + "group": "revwalk", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_revwalk_free-66" + ], + "log.c": [ + "ex/HEAD/log.html#git_revwalk_free-57" + ] + } + }, + "git_revwalk_repository": { + "type": "function", + "file": "revwalk.h", + "line": 269, + "lineto": 269, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the revision walker" + } + ], + "argline": "git_revwalk *walk", + "sig": "git_revwalk *", + "return": { + "type": "git_repository *", + "comment": " the repository being walked" + }, + "description": "

Return the repository on which this walker\n is operating.

\n", + "comments": "", + "group": "revwalk" + }, + "git_revwalk_add_hide_cb": { + "type": "function", + "file": "revwalk.h", + "line": 290, + "lineto": 293, + "args": [ + { + "name": "walk", + "type": "git_revwalk *", + "comment": "the revision walker" + }, + { + "name": "hide_cb", + "type": "git_revwalk_hide_cb", + "comment": "callback function to hide a commit and its parents" + }, + { + "name": "payload", + "type": "void *", + "comment": "data payload to be passed to callback function" + } + ], + "argline": "git_revwalk *walk, git_revwalk_hide_cb hide_cb, void *payload", + "sig": "git_revwalk *::git_revwalk_hide_cb::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Adds a callback function to hide a commit and its parents

\n", + "comments": "", + "group": "revwalk" + }, + "git_signature_new": { + "type": "function", + "file": "signature.h", + "line": 37, + "lineto": 37, + "args": [ + { + "name": "out", + "type": "git_signature **", + "comment": "new signature, in case of error NULL" + }, + { + "name": "name", + "type": "const char *", + "comment": "name of the person" + }, + { + "name": "email", + "type": "const char *", + "comment": "email of the person" + }, + { + "name": "time", + "type": "git_time_t", + "comment": "time when the action happened" + }, + { + "name": "offset", + "type": "int", + "comment": "timezone offset in minutes for the time" + } + ], + "argline": "git_signature **out, const char *name, const char *email, git_time_t time, int offset", + "sig": "git_signature **::const char *::const char *::git_time_t::int", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a new action signature.

\n", + "comments": "

Call git_signature_free() to free the data.

\n\n

Note: angle brackets ('<' and '>') characters are not allowed to be used in either the name or the email parameter.

\n", + "group": "signature", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_signature_new-67", + "ex/HEAD/general.html#git_signature_new-68" + ] + } + }, + "git_signature_now": { + "type": "function", + "file": "signature.h", + "line": 49, + "lineto": 49, + "args": [ + { + "name": "out", + "type": "git_signature **", + "comment": "new signature, in case of error NULL" + }, + { + "name": "name", + "type": "const char *", + "comment": "name of the person" + }, + { + "name": "email", + "type": "const char *", + "comment": "email of the person" + } + ], + "argline": "git_signature **out, const char *name, const char *email", + "sig": "git_signature **::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a new action signature with a timestamp of 'now'.

\n", + "comments": "

Call git_signature_free() to free the data.

\n", + "group": "signature" + }, + "git_signature_default": { + "type": "function", + "file": "signature.h", + "line": 63, + "lineto": 63, + "args": [ + { + "name": "out", + "type": "git_signature **", + "comment": "new signature" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository pointer" + } + ], + "argline": "git_signature **out, git_repository *repo", + "sig": "git_signature **::git_repository *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if config is missing, or error code" + }, + "description": "

Create a new action signature with default user and now timestamp.

\n", + "comments": "

This looks up the user.name and user.email from the configuration and uses the current time as the timestamp, and creates a new signature based on that information. It will return GIT_ENOTFOUND if either the user.name or user.email are not set.

\n", + "group": "signature", + "examples": { + "init.c": [ + "ex/HEAD/init.html#git_signature_default-12" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_signature_default-17" + ] + } + }, + "git_signature_dup": { + "type": "function", + "file": "signature.h", + "line": 75, + "lineto": 75, + "args": [ + { + "name": "dest", + "type": "git_signature **", + "comment": "pointer where to store the copy" + }, + { + "name": "sig", + "type": "const git_signature *", + "comment": "signature to duplicate" + } + ], + "argline": "git_signature **dest, const git_signature *sig", + "sig": "git_signature **::const git_signature *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create a copy of an existing signature. All internal strings are also\n duplicated.

\n", + "comments": "

Call git_signature_free() to free the data.

\n", + "group": "signature" + }, + "git_signature_free": { + "type": "function", + "file": "signature.h", + "line": 86, + "lineto": 86, + "args": [ + { + "name": "sig", + "type": "git_signature *", + "comment": "signature to free" + } + ], + "argline": "git_signature *sig", + "sig": "git_signature *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free an existing signature.

\n", + "comments": "

Because the signature is not an opaque structure, it is legal to free it manually, but be sure to free the "name" and "email" strings in addition to the structure itself.

\n", + "group": "signature", + "examples": { + "init.c": [ + "ex/HEAD/init.html#git_signature_free-13" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_signature_free-18" + ] + } + }, + "git_stash_apply_init_options": { + "type": "function", + "file": "stash.h", + "line": 153, + "lineto": 154, + "args": [ + { + "name": "opts", + "type": "git_stash_apply_options *", + "comment": "the `git_stash_apply_options` instance to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "the version of the struct; you should pass\n `GIT_STASH_APPLY_OPTIONS_INIT` here." + } + ], + "argline": "git_stash_apply_options *opts, unsigned int version", + "sig": "git_stash_apply_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_stash_apply_options with default values. Equivalent to\n creating an instance with GIT_STASH_APPLY_OPTIONS_INIT.

\n", + "comments": "", + "group": "stash" + }, + "git_stash_apply": { + "type": "function", + "file": "stash.h", + "line": 182, + "lineto": 185, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "The owning repository." + }, + { + "name": "index", + "type": "size_t", + "comment": "The position within the stash list. 0 points to the\n most recent stashed state." + }, + { + "name": "options", + "type": "const git_stash_apply_options *", + "comment": "Options to control how stashes are applied." + } + ], + "argline": "git_repository *repo, size_t index, const git_stash_apply_options *options", + "sig": "git_repository *::size_t::const git_stash_apply_options *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if there's no stashed state for the\n given index, GIT_EMERGECONFLICT if changes exist in the working\n directory, or an error code" + }, + "description": "

Apply a single stashed state from the stash list.

\n", + "comments": "

If local changes in the working directory conflict with changes in the stash then GIT_EMERGECONFLICT will be returned. In this case, the index will always remain unmodified and all files in the working directory will remain unmodified. However, if you are restoring untracked files or ignored files and there is a conflict when applying the modified files, then those files will remain in the working directory.

\n\n

If passing the GIT_STASH_APPLY_REINSTATE_INDEX flag and there would be conflicts when reinstating the index, the function will return GIT_EMERGECONFLICT and both the working directory and index will be left unmodified.

\n\n

Note that a minimum checkout strategy of GIT_CHECKOUT_SAFE is implied.

\n", + "group": "stash" + }, + "git_stash_foreach": { + "type": "function", + "file": "stash.h", + "line": 218, + "lineto": 221, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to find the stash." + }, + { + "name": "callback", + "type": "git_stash_cb", + "comment": "Callback to invoke per found stashed state. The most\n recent stash state will be enumerated first." + }, + { + "name": "payload", + "type": "void *", + "comment": "Extra parameter to callback function." + } + ], + "argline": "git_repository *repo, git_stash_cb callback, void *payload", + "sig": "git_repository *::git_stash_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code." + }, + "description": "

Loop over all the stashed states and issue a callback for each one.

\n", + "comments": "

If the callback returns a non-zero value, this will stop looping.

\n", + "group": "stash" + }, + "git_stash_drop": { + "type": "function", + "file": "stash.h", + "line": 234, + "lineto": 236, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "The owning repository." + }, + { + "name": "index", + "type": "size_t", + "comment": "The position within the stash list. 0 points to the\n most recent stashed state." + } + ], + "argline": "git_repository *repo, size_t index", + "sig": "git_repository *::size_t", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if there's no stashed state for the given\n index, or error code." + }, + "description": "

Remove a single stashed state from the stash list.

\n", + "comments": "", + "group": "stash" + }, + "git_stash_pop": { + "type": "function", + "file": "stash.h", + "line": 250, + "lineto": 253, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "The owning repository." + }, + { + "name": "index", + "type": "size_t", + "comment": "The position within the stash list. 0 points to the\n most recent stashed state." + }, + { + "name": "options", + "type": "const git_stash_apply_options *", + "comment": "Options to control how stashes are applied." + } + ], + "argline": "git_repository *repo, size_t index, const git_stash_apply_options *options", + "sig": "git_repository *::size_t::const git_stash_apply_options *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if there's no stashed state for the given\n index, or error code. (see git_stash_apply() above for details)" + }, + "description": "

Apply a single stashed state from the stash list and remove it from the list\n if successful.

\n", + "comments": "", + "group": "stash" + }, + "git_status_init_options": { + "type": "function", + "file": "status.h", + "line": 195, + "lineto": 197, + "args": [ + { + "name": "opts", + "type": "git_status_options *", + "comment": "The `git_status_options` instance to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_STATUS_OPTIONS_VERSION`" + } + ], + "argline": "git_status_options *opts, unsigned int version", + "sig": "git_status_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_status_options with default values. Equivalent to\n creating an instance with GIT_STATUS_OPTIONS_INIT.

\n", + "comments": "", + "group": "status" + }, + "git_status_foreach": { + "type": "function", + "file": "status.h", + "line": 235, + "lineto": 238, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "callback", + "type": "git_status_cb", + "comment": "The function to call on each file" + }, + { + "name": "payload", + "type": "void *", + "comment": "Pointer to pass through to callback function" + } + ], + "argline": "git_repository *repo, git_status_cb callback, void *payload", + "sig": "git_repository *::git_status_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Gather file statuses and run a callback for each one.

\n", + "comments": "

The callback is passed the path of the file, the status (a combination of the git_status_t values above) and the payload data pointer passed into this function.

\n\n

If the callback returns a non-zero value, this function will stop looping and return that value to caller.

\n", + "group": "status", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_status_foreach-10" + ] + } + }, + "git_status_foreach_ext": { + "type": "function", + "file": "status.h", + "line": 259, + "lineto": 263, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository object" + }, + { + "name": "opts", + "type": "const git_status_options *", + "comment": "Status options structure" + }, + { + "name": "callback", + "type": "git_status_cb", + "comment": "The function to call on each file" + }, + { + "name": "payload", + "type": "void *", + "comment": "Pointer to pass through to callback function" + } + ], + "argline": "git_repository *repo, const git_status_options *opts, git_status_cb callback, void *payload", + "sig": "git_repository *::const git_status_options *::git_status_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, non-zero callback return value, or error code" + }, + "description": "

Gather file status information and run callbacks as requested.

\n", + "comments": "

This is an extended version of the git_status_foreach() API that allows for more granular control over which paths will be processed and in what order. See the git_status_options structure for details about the additional controls that this makes available.

\n\n

Note that if a pathspec is given in the git_status_options to filter the status, then the results from rename detection (if you enable it) may not be accurate. To do rename detection properly, this must be called with no pathspec so that all files can be considered.

\n", + "group": "status", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_status_foreach_ext-11" + ] + } + }, + "git_status_file": { + "type": "function", + "file": "status.h", + "line": 291, + "lineto": 294, + "args": [ + { + "name": "status_flags", + "type": "unsigned int *", + "comment": "Output combination of git_status_t values for file" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "path", + "type": "const char *", + "comment": "The exact path to retrieve status for relative to the\n repository working directory" + } + ], + "argline": "unsigned int *status_flags, git_repository *repo, const char *path", + "sig": "unsigned int *::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if the file is not found in the HEAD,\n index, and work tree, GIT_EAMBIGUOUS if `path` matches multiple files\n or if it refers to a folder, and -1 on other errors." + }, + "description": "

Get file status for a single file.

\n", + "comments": "

This tries to get status for the filename that you give. If no files match that name (in either the HEAD, index, or working directory), this returns GIT_ENOTFOUND.

\n\n

If the name matches multiple files (for example, if the path names a directory or if running on a case- insensitive filesystem and yet the HEAD has two entries that both match the path), then this returns GIT_EAMBIGUOUS because it cannot give correct results.

\n\n

This does not do any sort of rename detection. Renames require a set of targets and because of the path filtering, there is not enough information to check renames correctly. To check file status with rename detection, there is no choice but to do a full git_status_list_new and scan through looking for the path that you are interested in.

\n", + "group": "status" + }, + "git_status_list_new": { + "type": "function", + "file": "status.h", + "line": 309, + "lineto": 312, + "args": [ + { + "name": "out", + "type": "git_status_list **", + "comment": "Pointer to store the status results in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository object" + }, + { + "name": "opts", + "type": "const git_status_options *", + "comment": "Status options structure" + } + ], + "argline": "git_status_list **out, git_repository *repo, const git_status_options *opts", + "sig": "git_status_list **::git_repository *::const git_status_options *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Gather file status information and populate the git_status_list.

\n", + "comments": "

Note that if a pathspec is given in the git_status_options to filter the status, then the results from rename detection (if you enable it) may not be accurate. To do rename detection properly, this must be called with no pathspec so that all files can be considered.

\n", + "group": "status", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_status_list_new-12", + "ex/HEAD/status.html#git_status_list_new-13" + ] + } + }, + "git_status_list_entrycount": { + "type": "function", + "file": "status.h", + "line": 323, + "lineto": 324, + "args": [ + { + "name": "statuslist", + "type": "git_status_list *", + "comment": "Existing status list object" + } + ], + "argline": "git_status_list *statuslist", + "sig": "git_status_list *", + "return": { + "type": "size_t", + "comment": " the number of status entries" + }, + "description": "

Gets the count of status entries in this list.

\n", + "comments": "

If there are no changes in status (at least according the options given when the status list was created), this can return 0.

\n", + "group": "status", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_status_list_entrycount-14", + "ex/HEAD/status.html#git_status_list_entrycount-15" + ] + } + }, + "git_status_byindex": { + "type": "function", + "file": "status.h", + "line": 335, + "lineto": 337, + "args": [ + { + "name": "statuslist", + "type": "git_status_list *", + "comment": "Existing status list object" + }, + { + "name": "idx", + "type": "size_t", + "comment": "Position of the entry" + } + ], + "argline": "git_status_list *statuslist, size_t idx", + "sig": "git_status_list *::size_t", + "return": { + "type": "const git_status_entry *", + "comment": " Pointer to the entry; NULL if out of bounds" + }, + "description": "

Get a pointer to one of the entries in the status list.

\n", + "comments": "

The entry is not modifiable and should not be freed.

\n", + "group": "status", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_status_byindex-16", + "ex/HEAD/status.html#git_status_byindex-17", + "ex/HEAD/status.html#git_status_byindex-18", + "ex/HEAD/status.html#git_status_byindex-19", + "ex/HEAD/status.html#git_status_byindex-20", + "ex/HEAD/status.html#git_status_byindex-21" + ] + } + }, + "git_status_list_free": { + "type": "function", + "file": "status.h", + "line": 344, + "lineto": 345, + "args": [ + { + "name": "statuslist", + "type": "git_status_list *", + "comment": "Existing status list object" + } + ], + "argline": "git_status_list *statuslist", + "sig": "git_status_list *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free an existing status list

\n", + "comments": "", + "group": "status", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_status_list_free-22" + ] + } + }, + "git_status_should_ignore": { + "type": "function", + "file": "status.h", + "line": 363, + "lineto": 366, + "args": [ + { + "name": "ignored", + "type": "int *", + "comment": "Boolean returning 0 if the file is not ignored, 1 if it is" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "path", + "type": "const char *", + "comment": "The file to check ignores for, rooted at the repo's workdir." + } + ], + "argline": "int *ignored, git_repository *repo, const char *path", + "sig": "int *::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 if ignore rules could be processed for the file (regardless\n of whether it exists or not), or an error \n<\n 0 if they could not." + }, + "description": "

Test if the ignore rules apply to a given file.

\n", + "comments": "

This function checks the ignore rules to see if they would apply to the given file. This indicates if the file would be ignored regardless of whether the file is already in the index or committed to the repository.

\n\n

One way to think of this is if you were to do "git add ." on the directory containing the file, would it be added or not?

\n", + "group": "status" + }, + "git_strarray_free": { + "type": "function", + "file": "strarray.h", + "line": 41, + "lineto": 41, + "args": [ + { + "name": "array", + "type": "git_strarray *", + "comment": "git_strarray from which to free string data" + } + ], + "argline": "git_strarray *array", + "sig": "git_strarray *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Close a string array object

\n", + "comments": "

This method should be called on git_strarray objects where the strings array is allocated and contains allocated strings, such as what you would get from git_strarray_copy(). Not doing so, will result in a memory leak.

\n\n

This does not free the git_strarray itself, since the library will never allocate that object directly itself (it is more commonly embedded inside another struct or created on the stack).

\n", + "group": "strarray", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_strarray_free-69" + ], + "remote.c": [ + "ex/HEAD/remote.html#git_strarray_free-16", + "ex/HEAD/remote.html#git_strarray_free-17" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_strarray_free-19" + ] + } + }, + "git_strarray_copy": { + "type": "function", + "file": "strarray.h", + "line": 53, + "lineto": 53, + "args": [ + { + "name": "tgt", + "type": "git_strarray *", + "comment": "target" + }, + { + "name": "src", + "type": "const git_strarray *", + "comment": "source" + } + ], + "argline": "git_strarray *tgt, const git_strarray *src", + "sig": "git_strarray *::const git_strarray *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n 0 on allocation failure" + }, + "description": "

Copy a string array object from source to target.

\n", + "comments": "

Note: target is overwritten and hence should be empty, otherwise its contents are leaked. Call git_strarray_free() if necessary.

\n", + "group": "strarray" + }, + "git_submodule_update_init_options": { + "type": "function", + "file": "submodule.h", + "line": 173, + "lineto": 174, + "args": [ + { + "name": "opts", + "type": "git_submodule_update_options *", + "comment": "The `git_submodule_update_options` instance to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_SUBMODULE_UPDATE_OPTIONS_VERSION`" + } + ], + "argline": "git_submodule_update_options *opts, unsigned int version", + "sig": "git_submodule_update_options *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_submodule_update_options with default values.\n Equivalent to creating an instance with GIT_SUBMODULE_UPDATE_OPTIONS_INIT.

\n", + "comments": "", + "group": "submodule" + }, + "git_submodule_update": { + "type": "function", + "file": "submodule.h", + "line": 192, + "lineto": 192, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Submodule object" + }, + { + "name": "init", + "type": "int", + "comment": "If the submodule is not initialized, setting this flag to true\n will initialize the submodule before updating. Otherwise, this will\n return an error if attempting to update an uninitialzed repository.\n but setting this to true forces them to be updated." + }, + { + "name": "options", + "type": "git_submodule_update_options *", + "comment": "configuration options for the update. If NULL, the\n function works as though GIT_SUBMODULE_UPDATE_OPTIONS_INIT was passed." + } + ], + "argline": "git_submodule *submodule, int init, git_submodule_update_options *options", + "sig": "git_submodule *::int::git_submodule_update_options *", + "return": { + "type": "int", + "comment": " 0 on success, any non-zero return value from a callback\n function, or a negative value to indicate an error (use\n `giterr_last` for a detailed error message)." + }, + "description": "

Update a submodule. This will clone a missing submodule and\n checkout the subrepository to the commit specified in the index of\n containing repository.

\n", + "comments": "", + "group": "submodule" + }, + "git_submodule_lookup": { + "type": "function", + "file": "submodule.h", + "line": 221, + "lineto": 224, + "args": [ + { + "name": "out", + "type": "git_submodule **", + "comment": "Output ptr to submodule; pass NULL to just get return code" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The parent repository" + }, + { + "name": "name", + "type": "const char *", + "comment": "The name of or path to the submodule; trailing slashes okay" + } + ], + "argline": "git_submodule **out, git_repository *repo, const char *name", + "sig": "git_submodule **::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_ENOTFOUND if submodule does not exist,\n GIT_EEXISTS if a repository is found in working directory only,\n -1 on other errors." + }, + "description": "

Lookup submodule information by name or path.

\n", + "comments": "

Given either the submodule name or path (they are usually the same), this returns a structure describing the submodule.

\n\n

There are two expected error scenarios:

\n\n
    \n
  • The submodule is not mentioned in the HEAD, the index, and the config, but does "exist" in the working directory (i.e. there is a subdirectory that appears to be a Git repository). In this case, this function returns GIT_EEXISTS to indicate a sub-repository exists but not in a state where a git_submodule can be instantiated. - The submodule is not mentioned in the HEAD, index, or config and the working directory doesn't contain a valid git repo at that path. There may or may not be anything else at that path, but nothing that looks like a submodule. In this case, this returns GIT_ENOTFOUND.
  • \n
\n\n

You must call git_submodule_free when done with the submodule.

\n", + "group": "submodule" + }, + "git_submodule_free": { + "type": "function", + "file": "submodule.h", + "line": 231, + "lineto": 231, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Submodule object" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Release a submodule

\n", + "comments": "", + "group": "submodule" + }, + "git_submodule_foreach": { + "type": "function", + "file": "submodule.h", + "line": 251, + "lineto": 254, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository" + }, + { + "name": "callback", + "type": "git_submodule_cb", + "comment": "Function to be called with the name of each submodule.\n Return a non-zero value to terminate the iteration." + }, + { + "name": "payload", + "type": "void *", + "comment": "Extra data to pass to callback" + } + ], + "argline": "git_repository *repo, git_submodule_cb callback, void *payload", + "sig": "git_repository *::git_submodule_cb::void *", + "return": { + "type": "int", + "comment": " 0 on success, -1 on error, or non-zero return value of callback" + }, + "description": "

Iterate over all tracked submodules of a repository.

\n", + "comments": "

See the note on git_submodule above. This iterates over the tracked submodules as described therein.

\n\n

If you are concerned about items in the working directory that look like submodules but are not tracked, the diff API will generate a diff record for workdir items that look like submodules but are not tracked, showing them as added in the workdir. Also, the status API will treat the entire subdirectory of a contained git repo as a single GIT_STATUS_WT_NEW item.

\n", + "group": "submodule", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_submodule_foreach-23" + ] + } + }, + "git_submodule_add_setup": { + "type": "function", + "file": "submodule.h", + "line": 281, + "lineto": 286, + "args": [ + { + "name": "out", + "type": "git_submodule **", + "comment": "The newly created submodule ready to open for clone" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repository in which you want to create the submodule" + }, + { + "name": "url", + "type": "const char *", + "comment": "URL for the submodule's remote" + }, + { + "name": "path", + "type": "const char *", + "comment": "Path at which the submodule should be created" + }, + { + "name": "use_gitlink", + "type": "int", + "comment": "Should workdir contain a gitlink to the repo in\n .git/modules vs. repo directly in workdir." + } + ], + "argline": "git_submodule **out, git_repository *repo, const char *url, const char *path, int use_gitlink", + "sig": "git_submodule **::git_repository *::const char *::const char *::int", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EEXISTS if submodule already exists,\n -1 on other errors." + }, + "description": "

Set up a new git submodule for checkout.

\n", + "comments": "

This does "git submodule add" up to the fetch and checkout of the submodule contents. It preps a new submodule, creates an entry in .gitmodules and creates an empty initialized repository either at the given path in the working directory or in .git/modules with a gitlink from the working directory to the new repo.

\n\n

To fully emulate "git submodule add" call this function, then open the submodule repo and perform the clone step as needed. Lastly, call git_submodule_add_finalize() to wrap up adding the new submodule and .gitmodules to the index to be ready to commit.

\n\n

You must call git_submodule_free on the submodule object when done.

\n", + "group": "submodule" + }, + "git_submodule_add_finalize": { + "type": "function", + "file": "submodule.h", + "line": 298, + "lineto": 298, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "The submodule to finish adding." + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Resolve the setup of a new git submodule.

\n", + "comments": "

This should be called on a submodule once you have called add setup and done the clone of the submodule. This adds the .gitmodules file and the newly cloned submodule to the index to be ready to be committed (but doesn't actually do the commit).

\n", + "group": "submodule" + }, + "git_submodule_add_to_index": { + "type": "function", + "file": "submodule.h", + "line": 310, + "lineto": 312, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "The submodule to add to the index" + }, + { + "name": "write_index", + "type": "int", + "comment": "Boolean if this should immediately write the index\n file. If you pass this as false, you will have to get the\n git_index and explicitly call `git_index_write()` on it to\n save the change." + } + ], + "argline": "git_submodule *submodule, int write_index", + "sig": "git_submodule *::int", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on failure" + }, + "description": "

Add current submodule HEAD commit to index of superproject.

\n", + "comments": "", + "group": "submodule" + }, + "git_submodule_owner": { + "type": "function", + "file": "submodule.h", + "line": 325, + "lineto": 325, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Pointer to submodule object" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "git_repository *", + "comment": " Pointer to `git_repository`" + }, + "description": "

Get the containing repository for a submodule.

\n", + "comments": "

This returns a pointer to the repository that contains the submodule. This is just a reference to the repository that was passed to the original git_submodule_lookup() call, so if that repository has been freed, then this may be a dangling reference.

\n", + "group": "submodule" + }, + "git_submodule_name": { + "type": "function", + "file": "submodule.h", + "line": 333, + "lineto": 333, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Pointer to submodule object" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "const char *", + "comment": " Pointer to the submodule name" + }, + "description": "

Get the name of submodule.

\n", + "comments": "", + "group": "submodule", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_submodule_name-24" + ] + } + }, + "git_submodule_path": { + "type": "function", + "file": "submodule.h", + "line": 344, + "lineto": 344, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Pointer to submodule object" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "const char *", + "comment": " Pointer to the submodule path" + }, + "description": "

Get the path to the submodule.

\n", + "comments": "

The path is almost always the same as the submodule name, but the two are actually not required to match.

\n", + "group": "submodule", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_submodule_path-25" + ] + } + }, + "git_submodule_url": { + "type": "function", + "file": "submodule.h", + "line": 352, + "lineto": 352, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Pointer to submodule object" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "const char *", + "comment": " Pointer to the submodule url" + }, + "description": "

Get the URL for the submodule.

\n", + "comments": "", + "group": "submodule" + }, + "git_submodule_resolve_url": { + "type": "function", + "file": "submodule.h", + "line": 362, + "lineto": 362, + "args": [ + { + "name": "out", + "type": "git_buf *", + "comment": "buffer to store the absolute submodule url in" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Pointer to repository object" + }, + { + "name": "url", + "type": "const char *", + "comment": "Relative url" + } + ], + "argline": "git_buf *out, git_repository *repo, const char *url", + "sig": "git_buf *::git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Resolve a submodule url relative to the given repository.

\n", + "comments": "", + "group": "submodule" + }, + "git_submodule_branch": { + "type": "function", + "file": "submodule.h", + "line": 370, + "lineto": 370, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Pointer to submodule object" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "const char *", + "comment": " Pointer to the submodule branch" + }, + "description": "

Get the branch for the submodule.

\n", + "comments": "", + "group": "submodule" + }, + "git_submodule_set_branch": { + "type": "function", + "file": "submodule.h", + "line": 383, + "lineto": 383, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to affect" + }, + { + "name": "name", + "type": "const char *", + "comment": "the name of the submodule to configure" + }, + { + "name": "branch", + "type": "const char *", + "comment": "Branch that should be used for the submodule" + } + ], + "argline": "git_repository *repo, const char *name, const char *branch", + "sig": "git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on failure" + }, + "description": "

Set the branch for the submodule in the configuration

\n", + "comments": "

After calling this, you may wish to call git_submodule_sync() to write the changes to the checked out submodule repository.

\n", + "group": "submodule" + }, + "git_submodule_set_url": { + "type": "function", + "file": "submodule.h", + "line": 397, + "lineto": 397, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to affect" + }, + { + "name": "name", + "type": "const char *", + "comment": "the name of the submodule to configure" + }, + { + "name": "url", + "type": "const char *", + "comment": "URL that should be used for the submodule" + } + ], + "argline": "git_repository *repo, const char *name, const char *url", + "sig": "git_repository *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on failure" + }, + "description": "

Set the URL for the submodule in the configuration

\n", + "comments": "

After calling this, you may wish to call git_submodule_sync() to write the changes to the checked out submodule repository.

\n", + "group": "submodule" + }, + "git_submodule_index_id": { + "type": "function", + "file": "submodule.h", + "line": 405, + "lineto": 405, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Pointer to submodule object" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "const git_oid *", + "comment": " Pointer to git_oid or NULL if submodule is not in index." + }, + "description": "

Get the OID for the submodule in the index.

\n", + "comments": "", + "group": "submodule" + }, + "git_submodule_head_id": { + "type": "function", + "file": "submodule.h", + "line": 413, + "lineto": 413, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Pointer to submodule object" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "const git_oid *", + "comment": " Pointer to git_oid or NULL if submodule is not in the HEAD." + }, + "description": "

Get the OID for the submodule in the current HEAD tree.

\n", + "comments": "", + "group": "submodule" + }, + "git_submodule_wd_id": { + "type": "function", + "file": "submodule.h", + "line": 426, + "lineto": 426, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Pointer to submodule object" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "const git_oid *", + "comment": " Pointer to git_oid or NULL if submodule is not checked out." + }, + "description": "

Get the OID for the submodule in the current working directory.

\n", + "comments": "

This returns the OID that corresponds to looking up 'HEAD' in the checked out submodule. If there are pending changes in the index or anything else, this won't notice that. You should call git_submodule_status() for a more complete picture about the state of the working directory.

\n", + "group": "submodule" + }, + "git_submodule_ignore": { + "type": "function", + "file": "submodule.h", + "line": 451, + "lineto": 452, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "The submodule to check" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "git_submodule_ignore_t", + "comment": " The current git_submodule_ignore_t value that will be used for\n this submodule." + }, + "description": "

Get the ignore rule that will be used for the submodule.

\n", + "comments": "

These values control the behavior of git_submodule_status() for this submodule. There are four ignore values:

\n\n
    \n
  • GIT_SUBMODULE_IGNORE_NONE will consider any change to the contents of the submodule from a clean checkout to be dirty, including the addition of untracked files. This is the default if unspecified. - GIT_SUBMODULE_IGNORE_UNTRACKED examines the contents of the working tree (i.e. call git_status_foreach() on the submodule) but UNTRACKED files will not count as making the submodule dirty. - GIT_SUBMODULE_IGNORE_DIRTY means to only check if the HEAD of the submodule has moved for status. This is fast since it does not need to scan the working tree of the submodule at all. - GIT_SUBMODULE_IGNORE_ALL means not to open the submodule repo. The working directory will be considered clean so long as there is a checked out version present.
  • \n
\n", + "group": "submodule" + }, + "git_submodule_set_ignore": { + "type": "function", + "file": "submodule.h", + "line": 464, + "lineto": 467, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to affect" + }, + { + "name": "name", + "type": "const char *", + "comment": "the name of the submdule" + }, + { + "name": "ignore", + "type": "git_submodule_ignore_t", + "comment": "The new value for the ignore rule" + } + ], + "argline": "git_repository *repo, const char *name, git_submodule_ignore_t ignore", + "sig": "git_repository *::const char *::git_submodule_ignore_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Set the ignore rule for the submodule in the configuration

\n", + "comments": "

This does not affect any currently-loaded instances.

\n", + "group": "submodule" + }, + "git_submodule_update_strategy": { + "type": "function", + "file": "submodule.h", + "line": 479, + "lineto": 480, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "The submodule to check" + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "git_submodule_update_t", + "comment": " The current git_submodule_update_t value that will be used\n for this submodule." + }, + "description": "

Get the update rule that will be used for the submodule.

\n", + "comments": "

This value controls the behavior of the git submodule update command. There are four useful values documented with git_submodule_update_t.

\n", + "group": "submodule" + }, + "git_submodule_set_update": { + "type": "function", + "file": "submodule.h", + "line": 492, + "lineto": 495, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to affect" + }, + { + "name": "name", + "type": "const char *", + "comment": "the name of the submodule to configure" + }, + { + "name": "update", + "type": "git_submodule_update_t", + "comment": "The new value to use" + } + ], + "argline": "git_repository *repo, const char *name, git_submodule_update_t update", + "sig": "git_repository *::const char *::git_submodule_update_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Set the update rule for the submodule in the configuration

\n", + "comments": "

This setting won't affect any existing instances.

\n", + "group": "submodule" + }, + "git_submodule_fetch_recurse_submodules": { + "type": "function", + "file": "submodule.h", + "line": 508, + "lineto": 509, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": null + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "git_submodule_recurse_t", + "comment": " 0 if fetchRecurseSubmodules is false, 1 if true" + }, + "description": "

Read the fetchRecurseSubmodules rule for a submodule.

\n", + "comments": "

This accesses the submodule.<name>.fetchRecurseSubmodules value for the submodule that controls fetching behavior for the submodule.

\n\n

Note that at this time, libgit2 does not honor this setting and the fetch functionality currently ignores submodules.

\n", + "group": "submodule" + }, + "git_submodule_set_fetch_recurse_submodules": { + "type": "function", + "file": "submodule.h", + "line": 521, + "lineto": 524, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository to affect" + }, + { + "name": "name", + "type": "const char *", + "comment": "the submodule to configure" + }, + { + "name": "fetch_recurse_submodules", + "type": "git_submodule_recurse_t", + "comment": "Boolean value" + } + ], + "argline": "git_repository *repo, const char *name, git_submodule_recurse_t fetch_recurse_submodules", + "sig": "git_repository *::const char *::git_submodule_recurse_t", + "return": { + "type": "int", + "comment": " old value for fetchRecurseSubmodules" + }, + "description": "

Set the fetchRecurseSubmodules rule for a submodule in the configuration

\n", + "comments": "

This setting won't affect any existing instances.

\n", + "group": "submodule" + }, + "git_submodule_init": { + "type": "function", + "file": "submodule.h", + "line": 539, + "lineto": 539, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "The submodule to write into the superproject config" + }, + { + "name": "overwrite", + "type": "int", + "comment": "By default, existing entries will not be overwritten,\n but setting this to true forces them to be updated." + } + ], + "argline": "git_submodule *submodule, int overwrite", + "sig": "git_submodule *::int", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on failure." + }, + "description": "

Copy submodule info into ".git/config" file.

\n", + "comments": "

Just like "git submodule init", this copies information about the submodule into ".git/config". You can use the accessor functions above to alter the in-memory git_submodule object and control what is written to the config, overriding what is in .gitmodules.

\n", + "group": "submodule" + }, + "git_submodule_repo_init": { + "type": "function", + "file": "submodule.h", + "line": 554, + "lineto": 557, + "args": [ + { + "name": "out", + "type": "git_repository **", + "comment": "Output pointer to the created git repository." + }, + { + "name": "sm", + "type": "const git_submodule *", + "comment": "The submodule to create a new subrepository from." + }, + { + "name": "use_gitlink", + "type": "int", + "comment": "Should the workdir contain a gitlink to\n the repo in .git/modules vs. repo directly in workdir." + } + ], + "argline": "git_repository **out, const git_submodule *sm, int use_gitlink", + "sig": "git_repository **::const git_submodule *::int", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on failure." + }, + "description": "

Set up the subrepository for a submodule in preparation for clone.

\n", + "comments": "

This function can be called to init and set up a submodule repository from a submodule in preparation to clone it from its remote.

\n", + "group": "submodule" + }, + "git_submodule_sync": { + "type": "function", + "file": "submodule.h", + "line": 567, + "lineto": 567, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": null + } + ], + "argline": "git_submodule *submodule", + "sig": "git_submodule *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Copy submodule remote info into submodule repo.

\n", + "comments": "

This copies the information about the submodule's URL into the checked out submodule config, acting like "git submodule sync". This is useful if you have altered the URL for the submodule (or it has been altered by a fetch of upstream changes) and you need to update your local repo.

\n", + "group": "submodule" + }, + "git_submodule_open": { + "type": "function", + "file": "submodule.h", + "line": 581, + "lineto": 583, + "args": [ + { + "name": "repo", + "type": "git_repository **", + "comment": "Pointer to the submodule repo which was opened" + }, + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Submodule to be opened" + } + ], + "argline": "git_repository **repo, git_submodule *submodule", + "sig": "git_repository **::git_submodule *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 if submodule repo could not be opened." + }, + "description": "

Open the repository for a submodule.

\n", + "comments": "

This is a newly opened repository object. The caller is responsible for calling git_repository_free() on it when done. Multiple calls to this function will return distinct git_repository objects. This will only work if the submodule is checked out into the working directory.

\n", + "group": "submodule" + }, + "git_submodule_reload": { + "type": "function", + "file": "submodule.h", + "line": 595, + "lineto": 595, + "args": [ + { + "name": "submodule", + "type": "git_submodule *", + "comment": "The submodule to reload" + }, + { + "name": "force", + "type": "int", + "comment": "Force reload even if the data doesn't seem out of date" + } + ], + "argline": "git_submodule *submodule, int force", + "sig": "git_submodule *::int", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on error" + }, + "description": "

Reread submodule info from config, index, and HEAD.

\n", + "comments": "

Call this to reread cached submodule information for this submodule if you have reason to believe that it has changed.

\n", + "group": "submodule" + }, + "git_submodule_status": { + "type": "function", + "file": "submodule.h", + "line": 611, + "lineto": 615, + "args": [ + { + "name": "status", + "type": "unsigned int *", + "comment": "Combination of `GIT_SUBMODULE_STATUS` flags" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to look" + }, + { + "name": "name", + "type": "const char *", + "comment": "name of the submodule" + }, + { + "name": "ignore", + "type": "git_submodule_ignore_t", + "comment": "the ignore rules to follow" + } + ], + "argline": "unsigned int *status, git_repository *repo, const char *name, git_submodule_ignore_t ignore", + "sig": "unsigned int *::git_repository *::const char *::git_submodule_ignore_t", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on error" + }, + "description": "

Get the status for a submodule.

\n", + "comments": "

This looks at a submodule and tries to determine the status. It will return a combination of the GIT_SUBMODULE_STATUS values above. How deeply it examines the working directory to do this will depend on the git_submodule_ignore_t value for the submodule.

\n", + "group": "submodule", + "examples": { + "status.c": [ + "ex/HEAD/status.html#git_submodule_status-26" + ] + } + }, + "git_submodule_location": { + "type": "function", + "file": "submodule.h", + "line": 631, + "lineto": 633, + "args": [ + { + "name": "location_status", + "type": "unsigned int *", + "comment": "Combination of first four `GIT_SUBMODULE_STATUS` flags" + }, + { + "name": "submodule", + "type": "git_submodule *", + "comment": "Submodule for which to get status" + } + ], + "argline": "unsigned int *location_status, git_submodule *submodule", + "sig": "unsigned int *::git_submodule *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on error" + }, + "description": "

Get the locations of submodule information.

\n", + "comments": "

This is a bit like a very lightweight version of git_submodule_status. It just returns a mask made of the first four submodule status values (i.e. the ones like GIT_SUBMODULE_STATUS_IN_HEAD, etc) that tell you where the submodule data comes from (i.e. the HEAD commit, gitmodules file, etc.). This can be useful if you want to know if the submodule is present in the working directory at this point in time, etc.

\n", + "group": "submodule" + }, + "git_commit_create_from_callback": { + "type": "function", + "file": "sys/commit.h", + "line": 66, + "lineto": 76, + "args": [ + { + "name": "id", + "type": "git_oid *", + "comment": null + }, + { + "name": "repo", + "type": "git_repository *", + "comment": null + }, + { + "name": "update_ref", + "type": "const char *", + "comment": null + }, + { + "name": "author", + "type": "const git_signature *", + "comment": null + }, + { + "name": "committer", + "type": "const git_signature *", + "comment": null + }, + { + "name": "message_encoding", + "type": "const char *", + "comment": null + }, + { + "name": "message", + "type": "const char *", + "comment": null + }, + { + "name": "tree", + "type": "const git_oid *", + "comment": null + }, + { + "name": "parent_cb", + "type": "git_commit_parent_callback", + "comment": null + }, + { + "name": "parent_payload", + "type": "void *", + "comment": null + } + ], + "argline": "git_oid *id, git_repository *repo, const char *update_ref, const git_signature *author, const git_signature *committer, const char *message_encoding, const char *message, const git_oid *tree, git_commit_parent_callback parent_cb, void *parent_payload", + "sig": "git_oid *::git_repository *::const char *::const git_signature *::const git_signature *::const char *::const char *::const git_oid *::git_commit_parent_callback::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a new commit in the repository with a callback to supply parents.

\n", + "comments": "

See documentation for git_commit_create() for information about the parameters, as the meaning is identical excepting that tree takes a git_oid and doesn't check for validity, and parent_cb is invoked with parent_payload and should return git_oid values or NULL to indicate that all parents are accounted for.

\n", + "group": "commit" + }, + "git_config_init_backend": { + "type": "function", + "file": "sys/config.h", + "line": 97, + "lineto": 99, + "args": [ + { + "name": "backend", + "type": "git_config_backend *", + "comment": "the `git_config_backend` struct to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_CONFIG_BACKEND_VERSION`" + } + ], + "argline": "git_config_backend *backend, unsigned int version", + "sig": "git_config_backend *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_config_backend with default values. Equivalent to\n creating an instance with GIT_CONFIG_BACKEND_INIT.

\n", + "comments": "", + "group": "config" + }, + "git_config_add_backend": { + "type": "function", + "file": "sys/config.h", + "line": 119, + "lineto": 123, + "args": [ + { + "name": "cfg", + "type": "git_config *", + "comment": "the configuration to add the file to" + }, + { + "name": "file", + "type": "git_config_backend *", + "comment": "the configuration file (backend) to add" + }, + { + "name": "level", + "type": "git_config_level_t", + "comment": "the priority level of the backend" + }, + { + "name": "force", + "type": "int", + "comment": "if a config file already exists for the given\n priority level, replace it" + } + ], + "argline": "git_config *cfg, git_config_backend *file, git_config_level_t level, int force", + "sig": "git_config *::git_config_backend *::git_config_level_t::int", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EEXISTS when adding more than one file\n for a given priority level (and force_replace set to 0), or error code" + }, + "description": "

Add a generic config file instance to an existing config

\n", + "comments": "

Note that the configuration object will free the file automatically.

\n\n

Further queries on this config object will access each of the config file instances in order (instances with a higher priority level will be accessed first).

\n", + "group": "config" + }, + "git_diff_print_callback__to_buf": { + "type": "function", + "file": "sys/diff.h", + "line": 37, + "lineto": 41, + "args": [ + { + "name": "delta", + "type": "const git_diff_delta *", + "comment": null + }, + { + "name": "hunk", + "type": "const git_diff_hunk *", + "comment": null + }, + { + "name": "line", + "type": "const git_diff_line *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_diff_delta *delta, const git_diff_hunk *hunk, const git_diff_line *line, void *payload", + "sig": "const git_diff_delta *::const git_diff_hunk *::const git_diff_line *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Diff print callback that writes to a git_buf.

\n", + "comments": "

This function is provided not for you to call it directly, but instead so you can use it as a function pointer to the git_diff_print or git_patch_print APIs. When using those APIs, you specify a callback to actually handle the diff and/or patch data.

\n\n

Use this callback to easily write that data to a git_buf buffer. You must pass a git_buf * value as the payload to the git_diff_print and/or git_patch_print function. The data will be appended to the buffer (after any existing content).

\n", + "group": "diff" + }, + "git_diff_print_callback__to_file_handle": { + "type": "function", + "file": "sys/diff.h", + "line": 57, + "lineto": 61, + "args": [ + { + "name": "delta", + "type": "const git_diff_delta *", + "comment": null + }, + { + "name": "hunk", + "type": "const git_diff_hunk *", + "comment": null + }, + { + "name": "line", + "type": "const git_diff_line *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_diff_delta *delta, const git_diff_hunk *hunk, const git_diff_line *line, void *payload", + "sig": "const git_diff_delta *::const git_diff_hunk *::const git_diff_line *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Diff print callback that writes to stdio FILE handle.

\n", + "comments": "

This function is provided not for you to call it directly, but instead so you can use it as a function pointer to the git_diff_print or git_patch_print APIs. When using those APIs, you specify a callback to actually handle the diff and/or patch data.

\n\n

Use this callback to easily write that data to a stdio FILE handle. You must pass a FILE * value (such as stdout or stderr or the return value from fopen()) as the payload to the git_diff_print and/or git_patch_print function. If you pass NULL, this will write data to stdout.

\n", + "group": "diff" + }, + "git_diff_get_perfdata": { + "type": "function", + "file": "sys/diff.h", + "line": 83, + "lineto": 84, + "args": [ + { + "name": "out", + "type": "git_diff_perfdata *", + "comment": "Structure to be filled with diff performance data" + }, + { + "name": "diff", + "type": "const git_diff *", + "comment": "Diff to read performance data from" + } + ], + "argline": "git_diff_perfdata *out, const git_diff *diff", + "sig": "git_diff_perfdata *::const git_diff *", + "return": { + "type": "int", + "comment": " 0 for success, \n<\n0 for error" + }, + "description": "

Get performance data for a diff object.

\n", + "comments": "", + "group": "diff" + }, + "git_status_list_get_perfdata": { + "type": "function", + "file": "sys/diff.h", + "line": 89, + "lineto": 90, + "args": [ + { + "name": "out", + "type": "git_diff_perfdata *", + "comment": null + }, + { + "name": "status", + "type": "const git_status_list *", + "comment": null + } + ], + "argline": "git_diff_perfdata *out, const git_status_list *status", + "sig": "git_diff_perfdata *::const git_status_list *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Get performance data for diffs from a git_status_list

\n", + "comments": "", + "group": "status" + }, + "git_filter_lookup": { + "type": "function", + "file": "sys/filter.h", + "line": 27, + "lineto": 27, + "args": [ + { + "name": "name", + "type": "const char *", + "comment": "The name of the filter" + } + ], + "argline": "const char *name", + "sig": "const char *", + "return": { + "type": "git_filter *", + "comment": " Pointer to the filter object or NULL if not found" + }, + "description": "

Look up a filter by name

\n", + "comments": "", + "group": "filter" + }, + "git_filter_list_new": { + "type": "function", + "file": "sys/filter.h", + "line": 57, + "lineto": 61, + "args": [ + { + "name": "out", + "type": "git_filter_list **", + "comment": null + }, + { + "name": "repo", + "type": "git_repository *", + "comment": null + }, + { + "name": "mode", + "type": "git_filter_mode_t", + "comment": null + }, + { + "name": "options", + "type": "uint32_t", + "comment": null + } + ], + "argline": "git_filter_list **out, git_repository *repo, git_filter_mode_t mode, uint32_t options", + "sig": "git_filter_list **::git_repository *::git_filter_mode_t::uint32_t", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a new empty filter list

\n", + "comments": "

Normally you won't use this because git_filter_list_load will create the filter list for you, but you can use this in combination with the git_filter_lookup and git_filter_list_push functions to assemble your own chains of filters.

\n", + "group": "filter" + }, + "git_filter_list_push": { + "type": "function", + "file": "sys/filter.h", + "line": 76, + "lineto": 77, + "args": [ + { + "name": "fl", + "type": "git_filter_list *", + "comment": null + }, + { + "name": "filter", + "type": "git_filter *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "git_filter_list *fl, git_filter *filter, void *payload", + "sig": "git_filter_list *::git_filter *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Add a filter to a filter list with the given payload.

\n", + "comments": "

Normally you won't have to do this because the filter list is created by calling the "check" function on registered filters when the filter attributes are set, but this does allow more direct manipulation of filter lists when desired.

\n\n

Note that normally the "check" function can set up a payload for the filter. Using this function, you can either pass in a payload if you know the expected payload format, or you can pass NULL. Some filters may fail with a NULL payload. Good luck!

\n", + "group": "filter" + }, + "git_filter_source_repo": { + "type": "function", + "file": "sys/filter.h", + "line": 100, + "lineto": 100, + "args": [ + { + "name": "src", + "type": "const git_filter_source *", + "comment": null + } + ], + "argline": "const git_filter_source *src", + "sig": "const git_filter_source *", + "return": { + "type": "git_repository *", + "comment": null + }, + "description": "

Get the repository that the source data is coming from.

\n", + "comments": "", + "group": "filter" + }, + "git_filter_source_path": { + "type": "function", + "file": "sys/filter.h", + "line": 105, + "lineto": 105, + "args": [ + { + "name": "src", + "type": "const git_filter_source *", + "comment": null + } + ], + "argline": "const git_filter_source *src", + "sig": "const git_filter_source *", + "return": { + "type": "const char *", + "comment": null + }, + "description": "

Get the path that the source data is coming from.

\n", + "comments": "", + "group": "filter" + }, + "git_filter_source_filemode": { + "type": "function", + "file": "sys/filter.h", + "line": 111, + "lineto": 111, + "args": [ + { + "name": "src", + "type": "const git_filter_source *", + "comment": null + } + ], + "argline": "const git_filter_source *src", + "sig": "const git_filter_source *", + "return": { + "type": "uint16_t", + "comment": null + }, + "description": "

Get the file mode of the source file\n If the mode is unknown, this will return 0

\n", + "comments": "", + "group": "filter" + }, + "git_filter_source_id": { + "type": "function", + "file": "sys/filter.h", + "line": 118, + "lineto": 118, + "args": [ + { + "name": "src", + "type": "const git_filter_source *", + "comment": null + } + ], + "argline": "const git_filter_source *src", + "sig": "const git_filter_source *", + "return": { + "type": "const git_oid *", + "comment": null + }, + "description": "

Get the OID of the source\n If the OID is unknown (often the case with GIT_FILTER_CLEAN) then\n this will return NULL.

\n", + "comments": "", + "group": "filter" + }, + "git_filter_source_mode": { + "type": "function", + "file": "sys/filter.h", + "line": 123, + "lineto": 123, + "args": [ + { + "name": "src", + "type": "const git_filter_source *", + "comment": null + } + ], + "argline": "const git_filter_source *src", + "sig": "const git_filter_source *", + "return": { + "type": "git_filter_mode_t", + "comment": null + }, + "description": "

Get the git_filter_mode_t to be used

\n", + "comments": "", + "group": "filter" + }, + "git_filter_source_flags": { + "type": "function", + "file": "sys/filter.h", + "line": 128, + "lineto": 128, + "args": [ + { + "name": "src", + "type": "const git_filter_source *", + "comment": null + } + ], + "argline": "const git_filter_source *src", + "sig": "const git_filter_source *", + "return": { + "type": "uint32_t", + "comment": null + }, + "description": "

Get the combination git_filter_flag_t options to be applied

\n", + "comments": "", + "group": "filter" + }, + "git_filter_register": { + "type": "function", + "file": "sys/filter.h", + "line": 301, + "lineto": 302, + "args": [ + { + "name": "name", + "type": "const char *", + "comment": "A name by which the filter can be referenced. Attempting\n \t\t\tto register with an in-use name will return GIT_EEXISTS." + }, + { + "name": "filter", + "type": "git_filter *", + "comment": "The filter definition. This pointer will be stored as is\n \t\t\tby libgit2 so it must be a durable allocation (either static\n \t\t\tor on the heap)." + }, + { + "name": "priority", + "type": "int", + "comment": "The priority for filter application" + } + ], + "argline": "const char *name, git_filter *filter, int priority", + "sig": "const char *::git_filter *::int", + "return": { + "type": "int", + "comment": " 0 on successful registry, error code \n<\n0 on failure" + }, + "description": "

Register a filter under a given name with a given priority.

\n", + "comments": "

As mentioned elsewhere, the initialize callback will not be invoked immediately. It is deferred until the filter is used in some way.

\n\n

A filter's attribute checks and check and apply callbacks will be issued in order of priority on smudge (to workdir), and in reverse order of priority on clean (to odb).

\n\n

Two filters are preregistered with libgit2: - GIT_FILTER_CRLF with priority 0 - GIT_FILTER_IDENT with priority 100

\n\n

Currently the filter registry is not thread safe, so any registering or deregistering of filters must be done outside of any possible usage of the filters (i.e. during application setup or shutdown).

\n", + "group": "filter" + }, + "git_filter_unregister": { + "type": "function", + "file": "sys/filter.h", + "line": 317, + "lineto": 317, + "args": [ + { + "name": "name", + "type": "const char *", + "comment": "The name under which the filter was registered" + } + ], + "argline": "const char *name", + "sig": "const char *", + "return": { + "type": "int", + "comment": " 0 on success, error code \n<\n0 on failure" + }, + "description": "

Remove the filter with the given name

\n", + "comments": "

Attempting to remove the builtin libgit2 filters is not permitted and will return an error.

\n\n

Currently the filter registry is not thread safe, so any registering or deregistering of filters must be done outside of any possible usage of the filters (i.e. during application setup or shutdown).

\n", + "group": "filter" + }, + "git_hashsig_create_fromfile": { + "type": "function", + "file": "sys/hashsig.h", + "line": 81, + "lineto": 84, + "args": [ + { + "name": "out", + "type": "git_hashsig **", + "comment": "The computed similarity signature." + }, + { + "name": "path", + "type": "const char *", + "comment": "The path to the input file." + }, + { + "name": "opts", + "type": "git_hashsig_option_t", + "comment": "The signature computation options (see above)." + } + ], + "argline": "git_hashsig **out, const char *path, git_hashsig_option_t opts", + "sig": "git_hashsig **::const char *::git_hashsig_option_t", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EBUFS if the buffer doesn't contain enough data to\n compute a valid signature (unless GIT_HASHSIG_ALLOW_SMALL_FILES is set), or\n error code." + }, + "description": "

Compute a similarity signature for a text file

\n", + "comments": "

This walks through the file, only loading a maximum of 4K of file data at a time. Otherwise, it acts just like git_hashsig_create.

\n", + "group": "hashsig" + }, + "git_hashsig_free": { + "type": "function", + "file": "sys/hashsig.h", + "line": 91, + "lineto": 91, + "args": [ + { + "name": "sig", + "type": "git_hashsig *", + "comment": "The similarity signature to free." + } + ], + "argline": "git_hashsig *sig", + "sig": "git_hashsig *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Release memory for a content similarity signature

\n", + "comments": "", + "group": "hashsig" + }, + "git_hashsig_compare": { + "type": "function", + "file": "sys/hashsig.h", + "line": 100, + "lineto": 102, + "args": [ + { + "name": "a", + "type": "const git_hashsig *", + "comment": "The first similarity signature to compare." + }, + { + "name": "b", + "type": "const git_hashsig *", + "comment": "The second similarity signature to compare." + } + ], + "argline": "const git_hashsig *a, const git_hashsig *b", + "sig": "const git_hashsig *::const git_hashsig *", + "return": { + "type": "int", + "comment": " [0 to 100] on success as the similarity score, or error code." + }, + "description": "

Measure similarity score between two similarity signatures

\n", + "comments": "", + "group": "hashsig" + }, + "git_mempack_new": { + "type": "function", + "file": "sys/mempack.h", + "line": 44, + "lineto": 44, + "args": [ + { + "name": "out", + "type": "git_odb_backend **", + "comment": "Pointer where to store the ODB backend" + } + ], + "argline": "git_odb_backend **out", + "sig": "git_odb_backend **", + "return": { + "type": "int", + "comment": " 0 on success; error code otherwise" + }, + "description": "
Instantiate a new mempack backend.\n
\n", + "comments": "
The backend must be added to an existing ODB with the highest   priority.\n\n    git_mempack_new(&mempacker);        git_repository_odb(&odb, repository);       git_odb_add_backend(odb, mempacker, 999);\n\nOnce the backend has been loaded, all writes to the ODB will    instead be queued in memory, and can be finalized with  `git_mempack_dump`.\n\nSubsequent reads will also be served from the in-memory store   to ensure consistency, until the memory store is dumped.\n
\n", + "group": "mempack" + }, + "git_mempack_reset": { + "type": "function", + "file": "sys/mempack.h", + "line": 81, + "lineto": 81, + "args": [ + { + "name": "backend", + "type": "git_odb_backend *", + "comment": "The mempack backend" + } + ], + "argline": "git_odb_backend *backend", + "sig": "git_odb_backend *", + "return": { + "type": "void", + "comment": null + }, + "description": "
Reset the memory packer by clearing all the queued objects.\n
\n", + "comments": "
This assumes that `git_mempack_dump` has been called before to  store all the queued objects into a single packfile.\n\nAlternatively, call `reset` without a previous dump to "undo"   all the recently written objects, giving transaction-like   semantics to the Git repository.\n
\n", + "group": "mempack" + }, + "git_odb_init_backend": { + "type": "function", + "file": "sys/odb_backend.h", + "line": 104, + "lineto": 106, + "args": [ + { + "name": "backend", + "type": "git_odb_backend *", + "comment": "the `git_odb_backend` struct to initialize." + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version the struct; pass `GIT_ODB_BACKEND_VERSION`" + } + ], + "argline": "git_odb_backend *backend, unsigned int version", + "sig": "git_odb_backend *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_odb_backend with default values. Equivalent to\n creating an instance with GIT_ODB_BACKEND_INIT.

\n", + "comments": "", + "group": "odb" + }, + "git_openssl_set_locking": { + "type": "function", + "file": "sys/openssl.h", + "line": 34, + "lineto": 34, + "args": [], + "argline": "", + "sig": "", + "return": { + "type": "int", + "comment": " 0 on success, -1 if there are errors or if libgit2 was not\n built with OpenSSL and threading support." + }, + "description": "

Initialize the OpenSSL locks

\n", + "comments": "

OpenSSL requires the application to determine how it performs locking.

\n\n

This is a last-resort convenience function which libgit2 provides for allocating and initializing the locks as well as setting the locking function to use the system's native locking functions.

\n\n

The locking function will be cleared and the memory will be freed when you call git_threads_shutdown().

\n\n

If your programming language has an OpenSSL package/bindings, it likely sets up locking. You should very strongly prefer that over this function.

\n", + "group": "openssl" + }, + "git_refdb_init_backend": { + "type": "function", + "file": "sys/refdb_backend.h", + "line": 183, + "lineto": 185, + "args": [ + { + "name": "backend", + "type": "git_refdb_backend *", + "comment": "the `git_refdb_backend` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_REFDB_BACKEND_VERSION`" + } + ], + "argline": "git_refdb_backend *backend, unsigned int version", + "sig": "git_refdb_backend *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_refdb_backend with default values. Equivalent to\n creating an instance with GIT_REFDB_BACKEND_INIT.

\n", + "comments": "", + "group": "refdb" + }, + "git_refdb_backend_fs": { + "type": "function", + "file": "sys/refdb_backend.h", + "line": 198, + "lineto": 200, + "args": [ + { + "name": "backend_out", + "type": "git_refdb_backend **", + "comment": "Output pointer to the git_refdb_backend object" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Git repository to access" + } + ], + "argline": "git_refdb_backend **backend_out, git_repository *repo", + "sig": "git_refdb_backend **::git_repository *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 error code on failure" + }, + "description": "

Constructors for default filesystem-based refdb backend

\n", + "comments": "

Under normal usage, this is called for you when the repository is opened / created, but you can use this to explicitly construct a filesystem refdb backend for a repository.

\n", + "group": "refdb" + }, + "git_refdb_set_backend": { + "type": "function", + "file": "sys/refdb_backend.h", + "line": 212, + "lineto": 214, + "args": [ + { + "name": "refdb", + "type": "git_refdb *", + "comment": "database to add the backend to" + }, + { + "name": "backend", + "type": "git_refdb_backend *", + "comment": "pointer to a git_refdb_backend instance" + } + ], + "argline": "git_refdb *refdb, git_refdb_backend *backend", + "sig": "git_refdb *::git_refdb_backend *", + "return": { + "type": "int", + "comment": " 0 on success; error code otherwise" + }, + "description": "

Sets the custom backend to an existing reference DB

\n", + "comments": "

The git_refdb will take ownership of the git_refdb_backend so you should NOT free it after calling this function.

\n", + "group": "refdb" + }, + "git_reference__alloc": { + "type": "function", + "file": "sys/refs.h", + "line": 31, + "lineto": 34, + "args": [ + { + "name": "name", + "type": "const char *", + "comment": "the reference name" + }, + { + "name": "oid", + "type": "const git_oid *", + "comment": "the object id for a direct reference" + }, + { + "name": "peel", + "type": "const git_oid *", + "comment": "the first non-tag object's OID, or NULL" + } + ], + "argline": "const char *name, const git_oid *oid, const git_oid *peel", + "sig": "const char *::const git_oid *::const git_oid *", + "return": { + "type": "git_reference *", + "comment": " the created git_reference or NULL on error" + }, + "description": "

Create a new direct reference from an OID.

\n", + "comments": "", + "group": "reference" + }, + "git_reference__alloc_symbolic": { + "type": "function", + "file": "sys/refs.h", + "line": 43, + "lineto": 45, + "args": [ + { + "name": "name", + "type": "const char *", + "comment": "the reference name" + }, + { + "name": "target", + "type": "const char *", + "comment": "the target for a symbolic reference" + } + ], + "argline": "const char *name, const char *target", + "sig": "const char *::const char *", + "return": { + "type": "git_reference *", + "comment": " the created git_reference or NULL on error" + }, + "description": "

Create a new symbolic reference.

\n", + "comments": "", + "group": "reference" + }, + "git_repository_new": { + "type": "function", + "file": "sys/repository.h", + "line": 31, + "lineto": 31, + "args": [ + { + "name": "out", + "type": "git_repository **", + "comment": "The blank repository" + } + ], + "argline": "git_repository **out", + "sig": "git_repository **", + "return": { + "type": "int", + "comment": " 0 on success, or an error code" + }, + "description": "

Create a new repository with neither backends nor config object

\n", + "comments": "

Note that this is only useful if you wish to associate the repository with a non-filesystem-backed object database and config store.

\n", + "group": "repository" + }, + "git_repository__cleanup": { + "type": "function", + "file": "sys/repository.h", + "line": 44, + "lineto": 44, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": null + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Reset all the internal state in a repository.

\n", + "comments": "

This will free all the mapped memory and internal objects of the repository and leave it in a "blank" state.

\n\n

There's no need to call this function directly unless you're trying to aggressively cleanup the repo before its deallocation. git_repository_free already performs this operation before deallocation the repo.

\n", + "group": "repository" + }, + "git_repository_reinit_filesystem": { + "type": "function", + "file": "sys/repository.h", + "line": 61, + "lineto": 63, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "recurse_submodules", + "type": "int", + "comment": "Should submodules be updated recursively" + } + ], + "argline": "git_repository *repo, int recurse_submodules", + "sig": "git_repository *::int", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n 0 on error" + }, + "description": "

Update the filesystem config settings for an open repository

\n", + "comments": "

When a repository is initialized, config values are set based on the properties of the filesystem that the repository is on, such as "core.ignorecase", "core.filemode", "core.symlinks", etc. If the repository is moved to a new filesystem, these properties may no longer be correct and API calls may not behave as expected. This call reruns the phase of repository initialization that sets those properties to compensate for the current filesystem of the repo.

\n", + "group": "repository" + }, + "git_repository_set_config": { + "type": "function", + "file": "sys/repository.h", + "line": 78, + "lineto": 78, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "config", + "type": "git_config *", + "comment": "A Config object" + } + ], + "argline": "git_repository *repo, git_config *config", + "sig": "git_repository *::git_config *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Set the configuration file for this repository

\n", + "comments": "

This configuration file will be used for all configuration queries involving this repository.

\n\n

The repository will keep a reference to the config file; the user must still free the config after setting it to the repository, or it will leak.

\n", + "group": "repository" + }, + "git_repository_set_odb": { + "type": "function", + "file": "sys/repository.h", + "line": 93, + "lineto": 93, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "odb", + "type": "git_odb *", + "comment": "An ODB object" + } + ], + "argline": "git_repository *repo, git_odb *odb", + "sig": "git_repository *::git_odb *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Set the Object Database for this repository

\n", + "comments": "

The ODB will be used for all object-related operations involving this repository.

\n\n

The repository will keep a reference to the ODB; the user must still free the ODB object after setting it to the repository, or it will leak.

\n", + "group": "repository" + }, + "git_repository_set_refdb": { + "type": "function", + "file": "sys/repository.h", + "line": 108, + "lineto": 108, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "refdb", + "type": "git_refdb *", + "comment": "An refdb object" + } + ], + "argline": "git_repository *repo, git_refdb *refdb", + "sig": "git_repository *::git_refdb *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Set the Reference Database Backend for this repository

\n", + "comments": "

The refdb will be used for all reference related operations involving this repository.

\n\n

The repository will keep a reference to the refdb; the user must still free the refdb object after setting it to the repository, or it will leak.

\n", + "group": "repository" + }, + "git_repository_set_index": { + "type": "function", + "file": "sys/repository.h", + "line": 123, + "lineto": 123, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "A repository object" + }, + { + "name": "index", + "type": "git_index *", + "comment": "An index object" + } + ], + "argline": "git_repository *repo, git_index *index", + "sig": "git_repository *::git_index *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Set the index file for this repository

\n", + "comments": "

This index will be used for all index-related operations involving this repository.

\n\n

The repository will keep a reference to the index file; the user must still free the index after setting it to the repository, or it will leak.

\n", + "group": "repository" + }, + "git_repository_set_bare": { + "type": "function", + "file": "sys/repository.h", + "line": 136, + "lineto": 136, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repo to make bare" + } + ], + "argline": "git_repository *repo", + "sig": "git_repository *", + "return": { + "type": "int", + "comment": " 0 on success, \n<\n0 on failure" + }, + "description": "

Set a repository to be bare.

\n", + "comments": "

Clear the working directory and set core.bare to true. You may also want to call git_repository_set_index(repo, NULL) since a bare repo typically does not have an index, but this function will not do that for you.

\n", + "group": "repository" + }, + "git_stream_register_tls": { + "type": "function", + "file": "sys/stream.h", + "line": 54, + "lineto": 54, + "args": [ + { + "name": "ctor", + "type": "git_stream_cb", + "comment": "the constructor to use" + } + ], + "argline": "git_stream_cb ctor", + "sig": "git_stream_cb", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Register a TLS stream constructor for the library to use

\n", + "comments": "

If a constructor is already set, it will be overwritten. Pass NULL in order to deregister the current constructor.

\n", + "group": "stream" + }, + "git_transport_init": { + "type": "function", + "file": "sys/transport.h", + "line": 119, + "lineto": 121, + "args": [ + { + "name": "opts", + "type": "git_transport *", + "comment": "the `git_transport` struct to initialize" + }, + { + "name": "version", + "type": "unsigned int", + "comment": "Version of struct; pass `GIT_TRANSPORT_VERSION`" + } + ], + "argline": "git_transport *opts, unsigned int version", + "sig": "git_transport *::unsigned int", + "return": { + "type": "int", + "comment": " Zero on success; -1 on failure." + }, + "description": "

Initializes a git_transport with default values. Equivalent to\n creating an instance with GIT_TRANSPORT_INIT.

\n", + "comments": "", + "group": "transport" + }, + "git_transport_new": { + "type": "function", + "file": "sys/transport.h", + "line": 133, + "lineto": 133, + "args": [ + { + "name": "out", + "type": "git_transport **", + "comment": "The newly created transport (out)" + }, + { + "name": "owner", + "type": "git_remote *", + "comment": "The git_remote which will own this transport" + }, + { + "name": "url", + "type": "const char *", + "comment": "The URL to connect to" + } + ], + "argline": "git_transport **out, git_remote *owner, const char *url", + "sig": "git_transport **::git_remote *::const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Function to use to create a transport from a URL. The transport database\n is scanned to find a transport that implements the scheme of the URI (i.e.\n git:// or http://) and a transport object is returned to the caller.

\n", + "comments": "", + "group": "transport" + }, + "git_transport_ssh_with_paths": { + "type": "function", + "file": "sys/transport.h", + "line": 149, + "lineto": 149, + "args": [ + { + "name": "out", + "type": "git_transport **", + "comment": "the resulting transport" + }, + { + "name": "owner", + "type": "git_remote *", + "comment": "the owning remote" + }, + { + "name": "payload", + "type": "void *", + "comment": "a strarray with the paths" + } + ], + "argline": "git_transport **out, git_remote *owner, void *payload", + "sig": "git_transport **::git_remote *::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an ssh transport with custom git command paths

\n", + "comments": "

This is a factory function suitable for setting as the transport callback in a remote (or for a clone in the options).

\n\n

The payload argument must be a strarray pointer with the paths for the git-upload-pack and git-receive-pack at index 0 and 1.

\n", + "group": "transport" + }, + "git_transport_unregister": { + "type": "function", + "file": "sys/transport.h", + "line": 177, + "lineto": 178, + "args": [ + { + "name": "prefix", + "type": "const char *", + "comment": "From the previous call to git_transport_register" + } + ], + "argline": "const char *prefix", + "sig": "const char *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Unregister a custom transport definition which was previously registered\n with git_transport_register.

\n", + "comments": "", + "group": "transport" + }, + "git_transport_dummy": { + "type": "function", + "file": "sys/transport.h", + "line": 191, + "lineto": 194, + "args": [ + { + "name": "out", + "type": "git_transport **", + "comment": "The newly created transport (out)" + }, + { + "name": "owner", + "type": "git_remote *", + "comment": "The git_remote which will own this transport" + }, + { + "name": "payload", + "type": "void *", + "comment": "You must pass NULL for this parameter." + } + ], + "argline": "git_transport **out, git_remote *owner, void *payload", + "sig": "git_transport **::git_remote *::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an instance of the dummy transport.

\n", + "comments": "", + "group": "transport" + }, + "git_transport_local": { + "type": "function", + "file": "sys/transport.h", + "line": 204, + "lineto": 207, + "args": [ + { + "name": "out", + "type": "git_transport **", + "comment": "The newly created transport (out)" + }, + { + "name": "owner", + "type": "git_remote *", + "comment": "The git_remote which will own this transport" + }, + { + "name": "payload", + "type": "void *", + "comment": "You must pass NULL for this parameter." + } + ], + "argline": "git_transport **out, git_remote *owner, void *payload", + "sig": "git_transport **::git_remote *::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an instance of the local transport.

\n", + "comments": "", + "group": "transport" + }, + "git_transport_smart": { + "type": "function", + "file": "sys/transport.h", + "line": 217, + "lineto": 220, + "args": [ + { + "name": "out", + "type": "git_transport **", + "comment": "The newly created transport (out)" + }, + { + "name": "owner", + "type": "git_remote *", + "comment": "The git_remote which will own this transport" + }, + { + "name": "payload", + "type": "void *", + "comment": "A pointer to a git_smart_subtransport_definition" + } + ], + "argline": "git_transport **out, git_remote *owner, void *payload", + "sig": "git_transport **::git_remote *::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an instance of the smart transport.

\n", + "comments": "", + "group": "transport" + }, + "git_transport_smart_certificate_check": { + "type": "function", + "file": "sys/transport.h", + "line": 231, + "lineto": 231, + "args": [ + { + "name": "transport", + "type": "git_transport *", + "comment": "a smart transport" + }, + { + "name": "cert", + "type": "git_cert *", + "comment": "the certificate to pass to the caller" + }, + { + "name": "valid", + "type": "int", + "comment": "whether we believe the certificate is valid" + }, + { + "name": "hostname", + "type": "const char *", + "comment": "the hostname we connected to" + } + ], + "argline": "git_transport *transport, git_cert *cert, int valid, const char *hostname", + "sig": "git_transport *::git_cert *::int::const char *", + "return": { + "type": "int", + "comment": " the return value of the callback" + }, + "description": "

Call the certificate check for this transport.

\n", + "comments": "", + "group": "transport" + }, + "git_smart_subtransport_http": { + "type": "function", + "file": "sys/transport.h", + "line": 352, + "lineto": 355, + "args": [ + { + "name": "out", + "type": "git_smart_subtransport **", + "comment": "The newly created subtransport" + }, + { + "name": "owner", + "type": "git_transport *", + "comment": "The smart transport to own this subtransport" + }, + { + "name": "param", + "type": "void *", + "comment": null + } + ], + "argline": "git_smart_subtransport **out, git_transport *owner, void *param", + "sig": "git_smart_subtransport **::git_transport *::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an instance of the http subtransport. This subtransport\n also supports https. On Win32, this subtransport may be implemented\n using the WinHTTP library.

\n", + "comments": "", + "group": "smart" + }, + "git_smart_subtransport_git": { + "type": "function", + "file": "sys/transport.h", + "line": 364, + "lineto": 367, + "args": [ + { + "name": "out", + "type": "git_smart_subtransport **", + "comment": "The newly created subtransport" + }, + { + "name": "owner", + "type": "git_transport *", + "comment": "The smart transport to own this subtransport" + }, + { + "name": "param", + "type": "void *", + "comment": null + } + ], + "argline": "git_smart_subtransport **out, git_transport *owner, void *param", + "sig": "git_smart_subtransport **::git_transport *::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an instance of the git subtransport.

\n", + "comments": "", + "group": "smart" + }, + "git_smart_subtransport_ssh": { + "type": "function", + "file": "sys/transport.h", + "line": 376, + "lineto": 379, + "args": [ + { + "name": "out", + "type": "git_smart_subtransport **", + "comment": "The newly created subtransport" + }, + { + "name": "owner", + "type": "git_transport *", + "comment": "The smart transport to own this subtransport" + }, + { + "name": "param", + "type": "void *", + "comment": null + } + ], + "argline": "git_smart_subtransport **out, git_transport *owner, void *param", + "sig": "git_smart_subtransport **::git_transport *::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Create an instance of the ssh subtransport.

\n", + "comments": "", + "group": "smart" + }, + "git_tag_lookup": { + "type": "function", + "file": "tag.h", + "line": 33, + "lineto": 34, + "args": [ + { + "name": "out", + "type": "git_tag **", + "comment": "pointer to the looked up tag" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repo to use when locating the tag." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "identity of the tag to locate." + } + ], + "argline": "git_tag **out, git_repository *repo, const git_oid *id", + "sig": "git_tag **::git_repository *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a tag object from the repository.

\n", + "comments": "", + "group": "tag", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_tag_lookup-70" + ] + } + }, + "git_tag_lookup_prefix": { + "type": "function", + "file": "tag.h", + "line": 48, + "lineto": 49, + "args": [ + { + "name": "out", + "type": "git_tag **", + "comment": "pointer to the looked up tag" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repo to use when locating the tag." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "identity of the tag to locate." + }, + { + "name": "len", + "type": "size_t", + "comment": "the length of the short identifier" + } + ], + "argline": "git_tag **out, git_repository *repo, const git_oid *id, size_t len", + "sig": "git_tag **::git_repository *::const git_oid *::size_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a tag object from the repository,\n given a prefix of its identifier (short id).

\n", + "comments": "", + "group": "tag" + }, + "git_tag_free": { + "type": "function", + "file": "tag.h", + "line": 61, + "lineto": 61, + "args": [ + { + "name": "tag", + "type": "git_tag *", + "comment": "the tag to close" + } + ], + "argline": "git_tag *tag", + "sig": "git_tag *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Close an open tag

\n", + "comments": "

You can no longer use the git_tag pointer after this call.

\n\n

IMPORTANT: You MUST call this method when you are through with a tag to release memory. Failure to do so will cause a memory leak.

\n", + "group": "tag" + }, + "git_tag_id": { + "type": "function", + "file": "tag.h", + "line": 69, + "lineto": 69, + "args": [ + { + "name": "tag", + "type": "const git_tag *", + "comment": "a previously loaded tag." + } + ], + "argline": "const git_tag *tag", + "sig": "const git_tag *", + "return": { + "type": "const git_oid *", + "comment": " object identity for the tag." + }, + "description": "

Get the id of a tag.

\n", + "comments": "", + "group": "tag" + }, + "git_tag_owner": { + "type": "function", + "file": "tag.h", + "line": 77, + "lineto": 77, + "args": [ + { + "name": "tag", + "type": "const git_tag *", + "comment": "A previously loaded tag." + } + ], + "argline": "const git_tag *tag", + "sig": "const git_tag *", + "return": { + "type": "git_repository *", + "comment": " Repository that contains this tag." + }, + "description": "

Get the repository that contains the tag.

\n", + "comments": "", + "group": "tag" + }, + "git_tag_target": { + "type": "function", + "file": "tag.h", + "line": 89, + "lineto": 89, + "args": [ + { + "name": "target_out", + "type": "git_object **", + "comment": "pointer where to store the target" + }, + { + "name": "tag", + "type": "const git_tag *", + "comment": "a previously loaded tag." + } + ], + "argline": "git_object **target_out, const git_tag *tag", + "sig": "git_object **::const git_tag *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Get the tagged object of a tag

\n", + "comments": "

This method performs a repository lookup for the given object and returns it

\n", + "group": "tag", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_tag_target-71" + ] + } + }, + "git_tag_target_id": { + "type": "function", + "file": "tag.h", + "line": 97, + "lineto": 97, + "args": [ + { + "name": "tag", + "type": "const git_tag *", + "comment": "a previously loaded tag." + } + ], + "argline": "const git_tag *tag", + "sig": "const git_tag *", + "return": { + "type": "const git_oid *", + "comment": " pointer to the OID" + }, + "description": "

Get the OID of the tagged object of a tag

\n", + "comments": "", + "group": "tag", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tag_target_id-35" + ] + } + }, + "git_tag_target_type": { + "type": "function", + "file": "tag.h", + "line": 105, + "lineto": 105, + "args": [ + { + "name": "tag", + "type": "const git_tag *", + "comment": "a previously loaded tag." + } + ], + "argline": "const git_tag *tag", + "sig": "const git_tag *", + "return": { + "type": "git_otype", + "comment": " type of the tagged object" + }, + "description": "

Get the type of a tag's tagged object

\n", + "comments": "", + "group": "tag", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tag_target_type-36" + ], + "general.c": [ + "ex/HEAD/general.html#git_tag_target_type-72" + ] + } + }, + "git_tag_name": { + "type": "function", + "file": "tag.h", + "line": 113, + "lineto": 113, + "args": [ + { + "name": "tag", + "type": "const git_tag *", + "comment": "a previously loaded tag." + } + ], + "argline": "const git_tag *tag", + "sig": "const git_tag *", + "return": { + "type": "const char *", + "comment": " name of the tag" + }, + "description": "

Get the name of a tag

\n", + "comments": "", + "group": "tag", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tag_name-37" + ], + "general.c": [ + "ex/HEAD/general.html#git_tag_name-73" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_tag_name-20" + ] + } + }, + "git_tag_tagger": { + "type": "function", + "file": "tag.h", + "line": 121, + "lineto": 121, + "args": [ + { + "name": "tag", + "type": "const git_tag *", + "comment": "a previously loaded tag." + } + ], + "argline": "const git_tag *tag", + "sig": "const git_tag *", + "return": { + "type": "const git_signature *", + "comment": " reference to the tag's author or NULL when unspecified" + }, + "description": "

Get the tagger (author) of a tag

\n", + "comments": "", + "group": "tag", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tag_tagger-38" + ] + } + }, + "git_tag_message": { + "type": "function", + "file": "tag.h", + "line": 129, + "lineto": 129, + "args": [ + { + "name": "tag", + "type": "const git_tag *", + "comment": "a previously loaded tag." + } + ], + "argline": "const git_tag *tag", + "sig": "const git_tag *", + "return": { + "type": "const char *", + "comment": " message of the tag or NULL when unspecified" + }, + "description": "

Get the message of a tag

\n", + "comments": "", + "group": "tag", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tag_message-39", + "ex/HEAD/cat-file.html#git_tag_message-40" + ], + "general.c": [ + "ex/HEAD/general.html#git_tag_message-74" + ], + "tag.c": [ + "ex/HEAD/tag.html#git_tag_message-21" + ] + } + }, + "git_tag_create": { + "type": "function", + "file": "tag.h", + "line": 171, + "lineto": 178, + "args": [ + { + "name": "oid", + "type": "git_oid *", + "comment": "Pointer where to store the OID of the\n newly created tag. If the tag already exists, this parameter\n will be the oid of the existing tag, and the function will\n return a GIT_EEXISTS error code." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to store the tag" + }, + { + "name": "tag_name", + "type": "const char *", + "comment": "Name for the tag; this name is validated\n for consistency. It should also not conflict with an\n already existing tag name" + }, + { + "name": "target", + "type": "const git_object *", + "comment": "Object to which this tag points. This object\n must belong to the given `repo`." + }, + { + "name": "tagger", + "type": "const git_signature *", + "comment": "Signature of the tagger for this tag, and\n of the tagging time" + }, + { + "name": "message", + "type": "const char *", + "comment": "Full message for this tag" + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite existing references" + } + ], + "argline": "git_oid *oid, git_repository *repo, const char *tag_name, const git_object *target, const git_signature *tagger, const char *message, int force", + "sig": "git_oid *::git_repository *::const char *::const git_object *::const git_signature *::const char *::int", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EINVALIDSPEC or an error code\n\tA tag object is written to the ODB, and a proper reference\n\tis written in the /refs/tags folder, pointing to it" + }, + "description": "

Create a new tag in the repository from an object

\n", + "comments": "

A new reference will also be created pointing to this tag object. If force is true and a reference already exists with the given name, it'll be replaced.

\n\n

The message will not be cleaned up. This can be achieved through git_message_prettify().

\n\n

The tag name will be checked for validity. You must avoid the characters '~', '^', ':', '\\', '?', '[', and '*', and the sequences ".." and "@{" which have special meaning to revparse.

\n", + "group": "tag", + "examples": { + "tag.c": [ + "ex/HEAD/tag.html#git_tag_create-22" + ] + } + }, + "git_tag_annotation_create": { + "type": "function", + "file": "tag.h", + "line": 203, + "lineto": 209, + "args": [ + { + "name": "oid", + "type": "git_oid *", + "comment": "Pointer where to store the OID of the\n newly created tag" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to store the tag" + }, + { + "name": "tag_name", + "type": "const char *", + "comment": "Name for the tag" + }, + { + "name": "target", + "type": "const git_object *", + "comment": "Object to which this tag points. This object\n must belong to the given `repo`." + }, + { + "name": "tagger", + "type": "const git_signature *", + "comment": "Signature of the tagger for this tag, and\n of the tagging time" + }, + { + "name": "message", + "type": "const char *", + "comment": "Full message for this tag" + } + ], + "argline": "git_oid *oid, git_repository *repo, const char *tag_name, const git_object *target, const git_signature *tagger, const char *message", + "sig": "git_oid *::git_repository *::const char *::const git_object *::const git_signature *::const char *", + "return": { + "type": "int", + "comment": " 0 on success or an error code" + }, + "description": "

Create a new tag in the object database pointing to a git_object

\n", + "comments": "

The message will not be cleaned up. This can be achieved through git_message_prettify().

\n", + "group": "tag" + }, + "git_tag_create_frombuffer": { + "type": "function", + "file": "tag.h", + "line": 220, + "lineto": 224, + "args": [ + { + "name": "oid", + "type": "git_oid *", + "comment": "Pointer where to store the OID of the newly created tag" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to store the tag" + }, + { + "name": "buffer", + "type": "const char *", + "comment": "Raw tag data" + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite existing tags" + } + ], + "argline": "git_oid *oid, git_repository *repo, const char *buffer, int force", + "sig": "git_oid *::git_repository *::const char *::int", + "return": { + "type": "int", + "comment": " 0 on success; error code otherwise" + }, + "description": "

Create a new tag in the repository from a buffer

\n", + "comments": "", + "group": "tag" + }, + "git_tag_create_lightweight": { + "type": "function", + "file": "tag.h", + "line": 256, + "lineto": 261, + "args": [ + { + "name": "oid", + "type": "git_oid *", + "comment": "Pointer where to store the OID of the provided\n target object. If the tag already exists, this parameter\n will be filled with the oid of the existing pointed object\n and the function will return a GIT_EEXISTS error code." + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to store the lightweight tag" + }, + { + "name": "tag_name", + "type": "const char *", + "comment": "Name for the tag; this name is validated\n for consistency. It should also not conflict with an\n already existing tag name" + }, + { + "name": "target", + "type": "const git_object *", + "comment": "Object to which this tag points. This object\n must belong to the given `repo`." + }, + { + "name": "force", + "type": "int", + "comment": "Overwrite existing references" + } + ], + "argline": "git_oid *oid, git_repository *repo, const char *tag_name, const git_object *target, int force", + "sig": "git_oid *::git_repository *::const char *::const git_object *::int", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EINVALIDSPEC or an error code\n\tA proper reference is written in the /refs/tags folder,\n pointing to the provided target object" + }, + "description": "

Create a new lightweight tag pointing at a target object

\n", + "comments": "

A new direct reference will be created pointing to this target object. If force is true and a reference already exists with the given name, it'll be replaced.

\n\n

The tag name will be checked for validity. See git_tag_create() for rules about valid names.

\n", + "group": "tag", + "examples": { + "tag.c": [ + "ex/HEAD/tag.html#git_tag_create_lightweight-23" + ] + } + }, + "git_tag_delete": { + "type": "function", + "file": "tag.h", + "line": 276, + "lineto": 278, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where lives the tag" + }, + { + "name": "tag_name", + "type": "const char *", + "comment": "Name of the tag to be deleted;\n this name is validated for consistency." + } + ], + "argline": "git_repository *repo, const char *tag_name", + "sig": "git_repository *::const char *", + "return": { + "type": "int", + "comment": " 0 on success, GIT_EINVALIDSPEC or an error code" + }, + "description": "

Delete an existing tag reference.

\n", + "comments": "

The tag name will be checked for validity. See git_tag_create() for rules about valid names.

\n", + "group": "tag", + "examples": { + "tag.c": [ + "ex/HEAD/tag.html#git_tag_delete-24" + ] + } + }, + "git_tag_list": { + "type": "function", + "file": "tag.h", + "line": 293, + "lineto": 295, + "args": [ + { + "name": "tag_names", + "type": "git_strarray *", + "comment": "Pointer to a git_strarray structure where\n\t\tthe tag names will be stored" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to find the tags" + } + ], + "argline": "git_strarray *tag_names, git_repository *repo", + "sig": "git_strarray *::git_repository *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Fill a list with all the tags in the Repository

\n", + "comments": "

The string array will be filled with the names of the matching tags; these values are owned by the user and should be free'd manually when no longer needed, using git_strarray_free.

\n", + "group": "tag" + }, + "git_tag_list_match": { + "type": "function", + "file": "tag.h", + "line": 315, + "lineto": 318, + "args": [ + { + "name": "tag_names", + "type": "git_strarray *", + "comment": "Pointer to a git_strarray structure where\n\t\tthe tag names will be stored" + }, + { + "name": "pattern", + "type": "const char *", + "comment": "Standard fnmatch pattern" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository where to find the tags" + } + ], + "argline": "git_strarray *tag_names, const char *pattern, git_repository *repo", + "sig": "git_strarray *::const char *::git_repository *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Fill a list with all the tags in the Repository\n which name match a defined pattern

\n", + "comments": "

If an empty pattern is provided, all the tags will be returned.

\n\n

The string array will be filled with the names of the matching tags; these values are owned by the user and should be free'd manually when no longer needed, using git_strarray_free.

\n", + "group": "tag", + "examples": { + "tag.c": [ + "ex/HEAD/tag.html#git_tag_list_match-25" + ] + } + }, + "git_tag_foreach": { + "type": "function", + "file": "tag.h", + "line": 330, + "lineto": 333, + "args": [ + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository" + }, + { + "name": "callback", + "type": "git_tag_foreach_cb", + "comment": "Callback function" + }, + { + "name": "payload", + "type": "void *", + "comment": "Pointer to callback data (optional)" + } + ], + "argline": "git_repository *repo, git_tag_foreach_cb callback, void *payload", + "sig": "git_repository *::git_tag_foreach_cb::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Call callback `cb' for each tag in the repository

\n", + "comments": "", + "group": "tag" + }, + "git_tag_peel": { + "type": "function", + "file": "tag.h", + "line": 346, + "lineto": 348, + "args": [ + { + "name": "tag_target_out", + "type": "git_object **", + "comment": "Pointer to the peeled git_object" + }, + { + "name": "tag", + "type": "const git_tag *", + "comment": "The tag to be processed" + } + ], + "argline": "git_object **tag_target_out, const git_tag *tag", + "sig": "git_object **::const git_tag *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Recursively peel a tag until a non tag git_object is found

\n", + "comments": "

The retrieved tag_target object is owned by the repository and should be closed with the git_object_free method.

\n", + "group": "tag" + }, + "git_tag_dup": { + "type": "function", + "file": "tag.h", + "line": 357, + "lineto": 357, + "args": [ + { + "name": "out", + "type": "git_tag **", + "comment": "Pointer to store the copy of the tag" + }, + { + "name": "source", + "type": "git_tag *", + "comment": "Original tag to copy" + } + ], + "argline": "git_tag **out, git_tag *source", + "sig": "git_tag **::git_tag *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create an in-memory copy of a tag. The copy must be explicitly\n free'd or it will leak.

\n", + "comments": "", + "group": "tag" + }, + "git_trace_set": { + "type": "function", + "file": "trace.h", + "line": 63, + "lineto": 63, + "args": [ + { + "name": "level", + "type": "git_trace_level_t", + "comment": "Level to set tracing to" + }, + { + "name": "cb", + "type": "git_trace_callback", + "comment": "Function to call with trace data" + } + ], + "argline": "git_trace_level_t level, git_trace_callback cb", + "sig": "git_trace_level_t::git_trace_callback", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Sets the system tracing configuration to the specified level with the\n specified callback. When system events occur at a level equal to, or\n lower than, the given level they will be reported to the given callback.

\n", + "comments": "", + "group": "trace" + }, + "git_cred_has_username": { + "type": "function", + "file": "transport.h", + "line": 190, + "lineto": 190, + "args": [ + { + "name": "cred", + "type": "git_cred *", + "comment": "object to check" + } + ], + "argline": "git_cred *cred", + "sig": "git_cred *", + "return": { + "type": "int", + "comment": " 1 if the credential object has non-NULL username, 0 otherwise" + }, + "description": "

Check whether a credential object contains username information.

\n", + "comments": "", + "group": "cred" + }, + "git_cred_userpass_plaintext_new": { + "type": "function", + "file": "transport.h", + "line": 201, + "lineto": 204, + "args": [ + { + "name": "out", + "type": "git_cred **", + "comment": "The newly created credential object." + }, + { + "name": "username", + "type": "const char *", + "comment": "The username of the credential." + }, + { + "name": "password", + "type": "const char *", + "comment": "The password of the credential." + } + ], + "argline": "git_cred **out, const char *username, const char *password", + "sig": "git_cred **::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 for success or an error code for failure" + }, + "description": "

Create a new plain-text username and password credential object.\n The supplied credential parameter will be internally duplicated.

\n", + "comments": "", + "group": "cred" + }, + "git_cred_ssh_key_new": { + "type": "function", + "file": "transport.h", + "line": 217, + "lineto": 222, + "args": [ + { + "name": "out", + "type": "git_cred **", + "comment": "The newly created credential object." + }, + { + "name": "username", + "type": "const char *", + "comment": "username to use to authenticate" + }, + { + "name": "publickey", + "type": "const char *", + "comment": "The path to the public key of the credential." + }, + { + "name": "privatekey", + "type": "const char *", + "comment": "The path to the private key of the credential." + }, + { + "name": "passphrase", + "type": "const char *", + "comment": "The passphrase of the credential." + } + ], + "argline": "git_cred **out, const char *username, const char *publickey, const char *privatekey, const char *passphrase", + "sig": "git_cred **::const char *::const char *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 for success or an error code for failure" + }, + "description": "

Create a new passphrase-protected ssh key credential object.\n The supplied credential parameter will be internally duplicated.

\n", + "comments": "", + "group": "cred" + }, + "git_cred_ssh_interactive_new": { + "type": "function", + "file": "transport.h", + "line": 233, + "lineto": 237, + "args": [ + { + "name": "out", + "type": "git_cred **", + "comment": null + }, + { + "name": "username", + "type": "const char *", + "comment": "Username to use to authenticate." + }, + { + "name": "prompt_callback", + "type": "git_cred_ssh_interactive_callback", + "comment": "The callback method used for prompts." + }, + { + "name": "payload", + "type": "void *", + "comment": "Additional data to pass to the callback." + } + ], + "argline": "git_cred **out, const char *username, git_cred_ssh_interactive_callback prompt_callback, void *payload", + "sig": "git_cred **::const char *::git_cred_ssh_interactive_callback::void *", + "return": { + "type": "int", + "comment": " 0 for success or an error code for failure." + }, + "description": "

Create a new ssh keyboard-interactive based credential object.\n The supplied credential parameter will be internally duplicated.

\n", + "comments": "", + "group": "cred" + }, + "git_cred_ssh_key_from_agent": { + "type": "function", + "file": "transport.h", + "line": 247, + "lineto": 249, + "args": [ + { + "name": "out", + "type": "git_cred **", + "comment": "The newly created credential object." + }, + { + "name": "username", + "type": "const char *", + "comment": "username to use to authenticate" + } + ], + "argline": "git_cred **out, const char *username", + "sig": "git_cred **::const char *", + "return": { + "type": "int", + "comment": " 0 for success or an error code for failure" + }, + "description": "

Create a new ssh key credential object used for querying an ssh-agent.\n The supplied credential parameter will be internally duplicated.

\n", + "comments": "", + "group": "cred" + }, + "git_cred_ssh_custom_new": { + "type": "function", + "file": "transport.h", + "line": 269, + "lineto": 275, + "args": [ + { + "name": "out", + "type": "git_cred **", + "comment": "The newly created credential object." + }, + { + "name": "username", + "type": "const char *", + "comment": "username to use to authenticate" + }, + { + "name": "publickey", + "type": "const char *", + "comment": "The bytes of the public key." + }, + { + "name": "publickey_len", + "type": "size_t", + "comment": "The length of the public key in bytes." + }, + { + "name": "sign_callback", + "type": "git_cred_sign_callback", + "comment": "The callback method to sign the data during the challenge." + }, + { + "name": "payload", + "type": "void *", + "comment": "Additional data to pass to the callback." + } + ], + "argline": "git_cred **out, const char *username, const char *publickey, size_t publickey_len, git_cred_sign_callback sign_callback, void *payload", + "sig": "git_cred **::const char *::const char *::size_t::git_cred_sign_callback::void *", + "return": { + "type": "int", + "comment": " 0 for success or an error code for failure" + }, + "description": "

Create an ssh key credential with a custom signing function.

\n", + "comments": "

This lets you use your own function to sign the challenge.

\n\n

This function and its credential type is provided for completeness and wraps libssh2_userauth_publickey(), which is undocumented.

\n\n

The supplied credential parameter will be internally duplicated.

\n", + "group": "cred" + }, + "git_cred_default_new": { + "type": "function", + "file": "transport.h", + "line": 283, + "lineto": 283, + "args": [ + { + "name": "out", + "type": "git_cred **", + "comment": null + } + ], + "argline": "git_cred **out", + "sig": "git_cred **", + "return": { + "type": "int", + "comment": " 0 for success or an error code for failure" + }, + "description": "

Create a "default" credential usable for Negotiate mechanisms like NTLM\n or Kerberos authentication.

\n", + "comments": "", + "group": "cred" + }, + "git_cred_username_new": { + "type": "function", + "file": "transport.h", + "line": 291, + "lineto": 291, + "args": [ + { + "name": "cred", + "type": "git_cred **", + "comment": null + }, + { + "name": "username", + "type": "const char *", + "comment": null + } + ], + "argline": "git_cred **cred, const char *username", + "sig": "git_cred **::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create a credential to specify a username.

\n", + "comments": "

This is used with ssh authentication to query for the username if none is specified in the url.

\n", + "group": "cred" + }, + "git_cred_ssh_key_memory_new": { + "type": "function", + "file": "transport.h", + "line": 303, + "lineto": 308, + "args": [ + { + "name": "out", + "type": "git_cred **", + "comment": "The newly created credential object." + }, + { + "name": "username", + "type": "const char *", + "comment": "username to use to authenticate." + }, + { + "name": "publickey", + "type": "const char *", + "comment": "The public key of the credential." + }, + { + "name": "privatekey", + "type": "const char *", + "comment": "The private key of the credential." + }, + { + "name": "passphrase", + "type": "const char *", + "comment": "The passphrase of the credential." + } + ], + "argline": "git_cred **out, const char *username, const char *publickey, const char *privatekey, const char *passphrase", + "sig": "git_cred **::const char *::const char *::const char *::const char *", + "return": { + "type": "int", + "comment": " 0 for success or an error code for failure" + }, + "description": "

Create a new ssh key credential object reading the keys from memory.

\n", + "comments": "", + "group": "cred" + }, + "git_cred_free": { + "type": "function", + "file": "transport.h", + "line": 319, + "lineto": 319, + "args": [ + { + "name": "cred", + "type": "git_cred *", + "comment": "the object to free" + } + ], + "argline": "git_cred *cred", + "sig": "git_cred *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a credential.

\n", + "comments": "

This is only necessary if you own the object; that is, if you are a transport.

\n", + "group": "cred" + }, + "git_tree_lookup": { + "type": "function", + "file": "tree.h", + "line": 32, + "lineto": 33, + "args": [ + { + "name": "out", + "type": "git_tree **", + "comment": "Pointer to the looked up tree" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "The repo to use when locating the tree." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "Identity of the tree to locate." + } + ], + "argline": "git_tree **out, git_repository *repo, const git_oid *id", + "sig": "git_tree **::git_repository *::const git_oid *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a tree object from the repository.

\n", + "comments": "", + "group": "tree", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_tree_lookup-75", + "ex/HEAD/general.html#git_tree_lookup-76" + ], + "init.c": [ + "ex/HEAD/init.html#git_tree_lookup-14" + ] + } + }, + "git_tree_lookup_prefix": { + "type": "function", + "file": "tree.h", + "line": 47, + "lineto": 51, + "args": [ + { + "name": "out", + "type": "git_tree **", + "comment": "pointer to the looked up tree" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repo to use when locating the tree." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "identity of the tree to locate." + }, + { + "name": "len", + "type": "size_t", + "comment": "the length of the short identifier" + } + ], + "argline": "git_tree **out, git_repository *repo, const git_oid *id, size_t len", + "sig": "git_tree **::git_repository *::const git_oid *::size_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Lookup a tree object from the repository,\n given a prefix of its identifier (short id).

\n", + "comments": "", + "group": "tree" + }, + "git_tree_free": { + "type": "function", + "file": "tree.h", + "line": 63, + "lineto": 63, + "args": [ + { + "name": "tree", + "type": "git_tree *", + "comment": "The tree to close" + } + ], + "argline": "git_tree *tree", + "sig": "git_tree *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Close an open tree

\n", + "comments": "

You can no longer use the git_tree pointer after this call.

\n\n

IMPORTANT: You MUST call this method when you stop using a tree to release memory. Failure to do so will cause a memory leak.

\n", + "group": "tree", + "examples": { + "diff.c": [ + "ex/HEAD/diff.html#git_tree_free-17", + "ex/HEAD/diff.html#git_tree_free-18" + ], + "init.c": [ + "ex/HEAD/init.html#git_tree_free-15" + ], + "log.c": [ + "ex/HEAD/log.html#git_tree_free-58", + "ex/HEAD/log.html#git_tree_free-59", + "ex/HEAD/log.html#git_tree_free-60", + "ex/HEAD/log.html#git_tree_free-61", + "ex/HEAD/log.html#git_tree_free-62" + ] + } + }, + "git_tree_id": { + "type": "function", + "file": "tree.h", + "line": 71, + "lineto": 71, + "args": [ + { + "name": "tree", + "type": "const git_tree *", + "comment": "a previously loaded tree." + } + ], + "argline": "const git_tree *tree", + "sig": "const git_tree *", + "return": { + "type": "const git_oid *", + "comment": " object identity for the tree." + }, + "description": "

Get the id of a tree.

\n", + "comments": "", + "group": "tree" + }, + "git_tree_owner": { + "type": "function", + "file": "tree.h", + "line": 79, + "lineto": 79, + "args": [ + { + "name": "tree", + "type": "const git_tree *", + "comment": "A previously loaded tree." + } + ], + "argline": "const git_tree *tree", + "sig": "const git_tree *", + "return": { + "type": "git_repository *", + "comment": " Repository that contains this tree." + }, + "description": "

Get the repository that contains the tree.

\n", + "comments": "", + "group": "tree" + }, + "git_tree_entrycount": { + "type": "function", + "file": "tree.h", + "line": 87, + "lineto": 87, + "args": [ + { + "name": "tree", + "type": "const git_tree *", + "comment": "a previously loaded tree." + } + ], + "argline": "const git_tree *tree", + "sig": "const git_tree *", + "return": { + "type": "size_t", + "comment": " the number of entries in the tree" + }, + "description": "

Get the number of entries listed in a tree

\n", + "comments": "", + "group": "tree", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tree_entrycount-41" + ], + "general.c": [ + "ex/HEAD/general.html#git_tree_entrycount-77" + ] + } + }, + "git_tree_entry_byname": { + "type": "function", + "file": "tree.h", + "line": 99, + "lineto": 100, + "args": [ + { + "name": "tree", + "type": "const git_tree *", + "comment": "a previously loaded tree." + }, + { + "name": "filename", + "type": "const char *", + "comment": "the filename of the desired entry" + } + ], + "argline": "const git_tree *tree, const char *filename", + "sig": "const git_tree *::const char *", + "return": { + "type": "const git_tree_entry *", + "comment": " the tree entry; NULL if not found" + }, + "description": "

Lookup a tree entry by its filename

\n", + "comments": "

This returns a git_tree_entry that is owned by the git_tree. You don't have to free it, but you must not use it after the git_tree is released.

\n", + "group": "tree", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_tree_entry_byname-78" + ] + } + }, + "git_tree_entry_byindex": { + "type": "function", + "file": "tree.h", + "line": 112, + "lineto": 113, + "args": [ + { + "name": "tree", + "type": "const git_tree *", + "comment": "a previously loaded tree." + }, + { + "name": "idx", + "type": "size_t", + "comment": "the position in the entry list" + } + ], + "argline": "const git_tree *tree, size_t idx", + "sig": "const git_tree *::size_t", + "return": { + "type": "const git_tree_entry *", + "comment": " the tree entry; NULL if not found" + }, + "description": "

Lookup a tree entry by its position in the tree

\n", + "comments": "

This returns a git_tree_entry that is owned by the git_tree. You don't have to free it, but you must not use it after the git_tree is released.

\n", + "group": "tree", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tree_entry_byindex-42" + ], + "general.c": [ + "ex/HEAD/general.html#git_tree_entry_byindex-79" + ] + } + }, + "git_tree_entry_byid": { + "type": "function", + "file": "tree.h", + "line": 127, + "lineto": 128, + "args": [ + { + "name": "tree", + "type": "const git_tree *", + "comment": "a previously loaded tree." + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "the sha being looked for" + } + ], + "argline": "const git_tree *tree, const git_oid *id", + "sig": "const git_tree *::const git_oid *", + "return": { + "type": "const git_tree_entry *", + "comment": " the tree entry; NULL if not found" + }, + "description": "

Lookup a tree entry by SHA value.

\n", + "comments": "

This returns a git_tree_entry that is owned by the git_tree. You don't have to free it, but you must not use it after the git_tree is released.

\n\n

Warning: this must examine every entry in the tree, so it is not fast.

\n", + "group": "tree" + }, + "git_tree_entry_bypath": { + "type": "function", + "file": "tree.h", + "line": 142, + "lineto": 145, + "args": [ + { + "name": "out", + "type": "git_tree_entry **", + "comment": "Pointer where to store the tree entry" + }, + { + "name": "root", + "type": "const git_tree *", + "comment": "Previously loaded tree which is the root of the relative path" + }, + { + "name": "path", + "type": "const char *", + "comment": "Path to the contained entry" + } + ], + "argline": "git_tree_entry **out, const git_tree *root, const char *path", + "sig": "git_tree_entry **::const git_tree *::const char *", + "return": { + "type": "int", + "comment": " 0 on success; GIT_ENOTFOUND if the path does not exist" + }, + "description": "

Retrieve a tree entry contained in a tree or in any of its subtrees,\n given its relative path.

\n", + "comments": "

Unlike the other lookup functions, the returned tree entry is owned by the user and must be freed explicitly with git_tree_entry_free().

\n", + "group": "tree" + }, + "git_tree_entry_dup": { + "type": "function", + "file": "tree.h", + "line": 157, + "lineto": 157, + "args": [ + { + "name": "dest", + "type": "git_tree_entry **", + "comment": "pointer where to store the copy" + }, + { + "name": "source", + "type": "const git_tree_entry *", + "comment": "tree entry to duplicate" + } + ], + "argline": "git_tree_entry **dest, const git_tree_entry *source", + "sig": "git_tree_entry **::const git_tree_entry *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Duplicate a tree entry

\n", + "comments": "

Create a copy of a tree entry. The returned copy is owned by the user, and must be freed explicitly with git_tree_entry_free().

\n", + "group": "tree" + }, + "git_tree_entry_free": { + "type": "function", + "file": "tree.h", + "line": 168, + "lineto": 168, + "args": [ + { + "name": "entry", + "type": "git_tree_entry *", + "comment": "The entry to free" + } + ], + "argline": "git_tree_entry *entry", + "sig": "git_tree_entry *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a user-owned tree entry

\n", + "comments": "

IMPORTANT: This function is only needed for tree entries owned by the user, such as the ones returned by git_tree_entry_dup() or git_tree_entry_bypath().

\n", + "group": "tree" + }, + "git_tree_entry_name": { + "type": "function", + "file": "tree.h", + "line": 176, + "lineto": 176, + "args": [ + { + "name": "entry", + "type": "const git_tree_entry *", + "comment": "a tree entry" + } + ], + "argline": "const git_tree_entry *entry", + "sig": "const git_tree_entry *", + "return": { + "type": "const char *", + "comment": " the name of the file" + }, + "description": "

Get the filename of a tree entry

\n", + "comments": "", + "group": "tree", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tree_entry_name-43" + ], + "general.c": [ + "ex/HEAD/general.html#git_tree_entry_name-80", + "ex/HEAD/general.html#git_tree_entry_name-81" + ] + } + }, + "git_tree_entry_id": { + "type": "function", + "file": "tree.h", + "line": 184, + "lineto": 184, + "args": [ + { + "name": "entry", + "type": "const git_tree_entry *", + "comment": "a tree entry" + } + ], + "argline": "const git_tree_entry *entry", + "sig": "const git_tree_entry *", + "return": { + "type": "const git_oid *", + "comment": " the oid of the object" + }, + "description": "

Get the id of the object pointed by the entry

\n", + "comments": "", + "group": "tree", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tree_entry_id-44" + ] + } + }, + "git_tree_entry_type": { + "type": "function", + "file": "tree.h", + "line": 192, + "lineto": 192, + "args": [ + { + "name": "entry", + "type": "const git_tree_entry *", + "comment": "a tree entry" + } + ], + "argline": "const git_tree_entry *entry", + "sig": "const git_tree_entry *", + "return": { + "type": "git_otype", + "comment": " the type of the pointed object" + }, + "description": "

Get the type of the object pointed by the entry

\n", + "comments": "", + "group": "tree", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tree_entry_type-45" + ] + } + }, + "git_tree_entry_filemode": { + "type": "function", + "file": "tree.h", + "line": 200, + "lineto": 200, + "args": [ + { + "name": "entry", + "type": "const git_tree_entry *", + "comment": "a tree entry" + } + ], + "argline": "const git_tree_entry *entry", + "sig": "const git_tree_entry *", + "return": { + "type": "git_filemode_t", + "comment": " filemode as an integer" + }, + "description": "

Get the UNIX file attributes of a tree entry

\n", + "comments": "", + "group": "tree", + "examples": { + "cat-file.c": [ + "ex/HEAD/cat-file.html#git_tree_entry_filemode-46" + ] + } + }, + "git_tree_entry_filemode_raw": { + "type": "function", + "file": "tree.h", + "line": 212, + "lineto": 212, + "args": [ + { + "name": "entry", + "type": "const git_tree_entry *", + "comment": "a tree entry" + } + ], + "argline": "const git_tree_entry *entry", + "sig": "const git_tree_entry *", + "return": { + "type": "git_filemode_t", + "comment": " filemode as an integer" + }, + "description": "

Get the raw UNIX file attributes of a tree entry

\n", + "comments": "

This function does not perform any normalization and is only useful if you need to be able to recreate the original tree object.

\n", + "group": "tree" + }, + "git_tree_entry_cmp": { + "type": "function", + "file": "tree.h", + "line": 220, + "lineto": 220, + "args": [ + { + "name": "e1", + "type": "const git_tree_entry *", + "comment": "first tree entry" + }, + { + "name": "e2", + "type": "const git_tree_entry *", + "comment": "second tree entry" + } + ], + "argline": "const git_tree_entry *e1, const git_tree_entry *e2", + "sig": "const git_tree_entry *::const git_tree_entry *", + "return": { + "type": "int", + "comment": " \n<\n0 if e1 is before e2, 0 if e1 == e2, >0 if e1 is after e2" + }, + "description": "

Compare two tree entries

\n", + "comments": "", + "group": "tree" + }, + "git_tree_entry_to_object": { + "type": "function", + "file": "tree.h", + "line": 232, + "lineto": 235, + "args": [ + { + "name": "object_out", + "type": "git_object **", + "comment": "pointer to the converted object" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "repository where to lookup the pointed object" + }, + { + "name": "entry", + "type": "const git_tree_entry *", + "comment": "a tree entry" + } + ], + "argline": "git_object **object_out, git_repository *repo, const git_tree_entry *entry", + "sig": "git_object **::git_repository *::const git_tree_entry *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Convert a tree entry to the git_object it points to.

\n", + "comments": "

You must call git_object_free() on the object when you are done with it.

\n", + "group": "tree", + "examples": { + "general.c": [ + "ex/HEAD/general.html#git_tree_entry_to_object-82" + ] + } + }, + "git_treebuilder_new": { + "type": "function", + "file": "tree.h", + "line": 254, + "lineto": 255, + "args": [ + { + "name": "out", + "type": "git_treebuilder **", + "comment": "Pointer where to store the tree builder" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "Repository in which to store the object" + }, + { + "name": "source", + "type": "const git_tree *", + "comment": "Source tree to initialize the builder (optional)" + } + ], + "argline": "git_treebuilder **out, git_repository *repo, const git_tree *source", + "sig": "git_treebuilder **::git_repository *::const git_tree *", + "return": { + "type": "int", + "comment": " 0 on success; error code otherwise" + }, + "description": "

Create a new tree builder.

\n", + "comments": "

The tree builder can be used to create or modify trees in memory and write them as tree objects to the database.

\n\n

If the source parameter is not NULL, the tree builder will be initialized with the entries of the given tree.

\n\n

If the source parameter is NULL, the tree builder will start with no entries and will have to be filled manually.

\n", + "group": "treebuilder" + }, + "git_treebuilder_clear": { + "type": "function", + "file": "tree.h", + "line": 262, + "lineto": 262, + "args": [ + { + "name": "bld", + "type": "git_treebuilder *", + "comment": "Builder to clear" + } + ], + "argline": "git_treebuilder *bld", + "sig": "git_treebuilder *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Clear all the entries in the builder

\n", + "comments": "", + "group": "treebuilder" + }, + "git_treebuilder_entrycount": { + "type": "function", + "file": "tree.h", + "line": 270, + "lineto": 270, + "args": [ + { + "name": "bld", + "type": "git_treebuilder *", + "comment": "a previously loaded treebuilder." + } + ], + "argline": "git_treebuilder *bld", + "sig": "git_treebuilder *", + "return": { + "type": "unsigned int", + "comment": " the number of entries in the treebuilder" + }, + "description": "

Get the number of entries listed in a treebuilder

\n", + "comments": "", + "group": "treebuilder" + }, + "git_treebuilder_free": { + "type": "function", + "file": "tree.h", + "line": 281, + "lineto": 281, + "args": [ + { + "name": "bld", + "type": "git_treebuilder *", + "comment": "Builder to free" + } + ], + "argline": "git_treebuilder *bld", + "sig": "git_treebuilder *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Free a tree builder

\n", + "comments": "

This will clear all the entries and free the builder. Failing to free the builder after you're done using it will result in a memory leak

\n", + "group": "treebuilder" + }, + "git_treebuilder_get": { + "type": "function", + "file": "tree.h", + "line": 293, + "lineto": 294, + "args": [ + { + "name": "bld", + "type": "git_treebuilder *", + "comment": "Tree builder" + }, + { + "name": "filename", + "type": "const char *", + "comment": "Name of the entry" + } + ], + "argline": "git_treebuilder *bld, const char *filename", + "sig": "git_treebuilder *::const char *", + "return": { + "type": "const git_tree_entry *", + "comment": " pointer to the entry; NULL if not found" + }, + "description": "

Get an entry from the builder from its filename

\n", + "comments": "

The returned entry is owned by the builder and should not be freed manually.

\n", + "group": "treebuilder" + }, + "git_treebuilder_insert": { + "type": "function", + "file": "tree.h", + "line": 323, + "lineto": 328, + "args": [ + { + "name": "out", + "type": "const git_tree_entry **", + "comment": "Pointer to store the entry (optional)" + }, + { + "name": "bld", + "type": "git_treebuilder *", + "comment": "Tree builder" + }, + { + "name": "filename", + "type": "const char *", + "comment": "Filename of the entry" + }, + { + "name": "id", + "type": "const git_oid *", + "comment": "SHA1 oid of the entry" + }, + { + "name": "filemode", + "type": "git_filemode_t", + "comment": "Folder attributes of the entry. This parameter must\n\t\t\tbe valued with one of the following entries: 0040000, 0100644,\n\t\t\t0100755, 0120000 or 0160000." + } + ], + "argline": "const git_tree_entry **out, git_treebuilder *bld, const char *filename, const git_oid *id, git_filemode_t filemode", + "sig": "const git_tree_entry **::git_treebuilder *::const char *::const git_oid *::git_filemode_t", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Add or update an entry to the builder

\n", + "comments": "

Insert a new entry for filename in the builder with the given attributes.

\n\n

If an entry named filename already exists, its attributes will be updated with the given ones.

\n\n

The optional pointer out can be used to retrieve a pointer to the newly created/updated entry. Pass NULL if you do not need it. The pointer may not be valid past the next operation in this builder. Duplicate the entry if you want to keep it.

\n\n

No attempt is being made to ensure that the provided oid points to an existing git object in the object database, nor that the attributes make sense regarding the type of the pointed at object.

\n", + "group": "treebuilder" + }, + "git_treebuilder_remove": { + "type": "function", + "file": "tree.h", + "line": 336, + "lineto": 337, + "args": [ + { + "name": "bld", + "type": "git_treebuilder *", + "comment": "Tree builder" + }, + { + "name": "filename", + "type": "const char *", + "comment": "Filename of the entry to remove" + } + ], + "argline": "git_treebuilder *bld, const char *filename", + "sig": "git_treebuilder *::const char *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Remove an entry from the builder by its filename

\n", + "comments": "", + "group": "treebuilder" + }, + "git_treebuilder_filter": { + "type": "function", + "file": "tree.h", + "line": 360, + "lineto": 363, + "args": [ + { + "name": "bld", + "type": "git_treebuilder *", + "comment": "Tree builder" + }, + { + "name": "filter", + "type": "git_treebuilder_filter_cb", + "comment": "Callback to filter entries" + }, + { + "name": "payload", + "type": "void *", + "comment": "Extra data to pass to filter callback" + } + ], + "argline": "git_treebuilder *bld, git_treebuilder_filter_cb filter, void *payload", + "sig": "git_treebuilder *::git_treebuilder_filter_cb::void *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Selectively remove entries in the tree

\n", + "comments": "

The filter callback will be called for each entry in the tree with a pointer to the entry and the provided payload; if the callback returns non-zero, the entry will be filtered (removed from the builder).

\n", + "group": "treebuilder" + }, + "git_treebuilder_write": { + "type": "function", + "file": "tree.h", + "line": 375, + "lineto": 376, + "args": [ + { + "name": "id", + "type": "git_oid *", + "comment": "Pointer to store the OID of the newly written tree" + }, + { + "name": "bld", + "type": "git_treebuilder *", + "comment": "Tree builder to write" + } + ], + "argline": "git_oid *id, git_treebuilder *bld", + "sig": "git_oid *::git_treebuilder *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Write the contents of the tree builder as a tree object

\n", + "comments": "

The tree builder will be written to the given repo, and its identifying SHA1 hash will be stored in the id pointer.

\n", + "group": "treebuilder" + }, + "git_tree_walk": { + "type": "function", + "file": "tree.h", + "line": 406, + "lineto": 410, + "args": [ + { + "name": "tree", + "type": "const git_tree *", + "comment": "The tree to walk" + }, + { + "name": "mode", + "type": "git_treewalk_mode", + "comment": "Traversal mode (pre or post-order)" + }, + { + "name": "callback", + "type": "git_treewalk_cb", + "comment": "Function to call on each tree entry" + }, + { + "name": "payload", + "type": "void *", + "comment": "Opaque pointer to be passed on each callback" + } + ], + "argline": "const git_tree *tree, git_treewalk_mode mode, git_treewalk_cb callback, void *payload", + "sig": "const git_tree *::git_treewalk_mode::git_treewalk_cb::void *", + "return": { + "type": "int", + "comment": " 0 or an error code" + }, + "description": "

Traverse the entries in a tree and its subtrees in post or pre order.

\n", + "comments": "

The entries will be traversed in the specified order, children subtrees will be automatically loaded as required, and the callback will be called once per entry with the current (relative) root for the entry and the entry data itself.

\n\n

If the callback returns a positive value, the passed entry will be skipped on the traversal (in pre mode). A negative value stops the walk.

\n", + "group": "tree" + }, + "git_tree_dup": { + "type": "function", + "file": "tree.h", + "line": 419, + "lineto": 419, + "args": [ + { + "name": "out", + "type": "git_tree **", + "comment": "Pointer to store the copy of the tree" + }, + { + "name": "source", + "type": "git_tree *", + "comment": "Original tree to copy" + } + ], + "argline": "git_tree **out, git_tree *source", + "sig": "git_tree **::git_tree *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Create an in-memory copy of a tree. The copy must be explicitly\n free'd or it will leak.

\n", + "comments": "", + "group": "tree" + } + }, + "callbacks": { + "git_checkout_notify_cb": { + "type": "callback", + "file": "checkout.h", + "line": 223, + "lineto": 229, + "args": [ + { + "name": "why", + "type": "git_checkout_notify_t", + "comment": null + }, + { + "name": "path", + "type": "const char *", + "comment": null + }, + { + "name": "baseline", + "type": "const git_diff_file *", + "comment": null + }, + { + "name": "target", + "type": "const git_diff_file *", + "comment": null + }, + { + "name": "workdir", + "type": "const git_diff_file *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "git_checkout_notify_t why, const char *path, const git_diff_file *baseline, const git_diff_file *target, const git_diff_file *workdir, void *payload", + "sig": "git_checkout_notify_t::const char *::const git_diff_file *::const git_diff_file *::const git_diff_file *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Checkout notification callback function

\n", + "comments": "" + }, + "git_checkout_progress_cb": { + "type": "callback", + "file": "checkout.h", + "line": 232, + "lineto": 236, + "args": [ + { + "name": "path", + "type": "const char *", + "comment": null + }, + { + "name": "completed_steps", + "type": "size_t", + "comment": null + }, + { + "name": "total_steps", + "type": "size_t", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const char *path, size_t completed_steps, size_t total_steps, void *payload", + "sig": "const char *::size_t::size_t::void *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Checkout progress notification function

\n", + "comments": "" + }, + "git_checkout_perfdata_cb": { + "type": "callback", + "file": "checkout.h", + "line": 239, + "lineto": 241, + "args": [ + { + "name": "perfdata", + "type": "const git_checkout_perfdata *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_checkout_perfdata *perfdata, void *payload", + "sig": "const git_checkout_perfdata *::void *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Checkout perfdata notification function

\n", + "comments": "" + }, + "git_remote_create_cb": { + "type": "callback", + "file": "clone.h", + "line": 69, + "lineto": 74, + "args": [ + { + "name": "out", + "type": "git_remote **", + "comment": "the resulting remote" + }, + { + "name": "repo", + "type": "git_repository *", + "comment": "the repository in which to create the remote" + }, + { + "name": "name", + "type": "const char *", + "comment": "the remote's name" + }, + { + "name": "url", + "type": "const char *", + "comment": "the remote's url" + }, + { + "name": "payload", + "type": "void *", + "comment": "an opaque payload" + } + ], + "argline": "git_remote **out, git_repository *repo, const char *name, const char *url, void *payload", + "sig": "git_remote **::git_repository *::const char *::const char *::void *", + "return": { + "type": "int", + "comment": " 0, GIT_EINVALIDSPEC, GIT_EEXISTS or an error code" + }, + "description": "

The signature of a function matching git_remote_create, with an additional\n void* as a callback payload.

\n", + "comments": "

Callers of git_clone may provide a function matching this signature to override the remote creation and customization process during a clone operation.

\n" + }, + "git_repository_create_cb": { + "type": "callback", + "file": "clone.h", + "line": 90, + "lineto": 94, + "args": [ + { + "name": "out", + "type": "git_repository **", + "comment": "the resulting repository" + }, + { + "name": "path", + "type": "const char *", + "comment": "path in which to create the repository" + }, + { + "name": "bare", + "type": "int", + "comment": "whether the repository is bare. This is the value from the clone options" + }, + { + "name": "payload", + "type": "void *", + "comment": "payload specified by the options" + } + ], + "argline": "git_repository **out, const char *path, int bare, void *payload", + "sig": "git_repository **::const char *::int::void *", + "return": { + "type": "int", + "comment": " 0, or a negative value to indicate error" + }, + "description": "

The signature of a function matching git_repository_init, with an\n additional void * as callback payload.

\n", + "comments": "

Callers of git_clone may provide a function matching this signature to override the repository creation and customization process during a clone operation.

\n" + }, + "git_diff_notify_cb": { + "type": "callback", + "file": "diff.h", + "line": 347, + "lineto": 351, + "args": [ + { + "name": "diff_so_far", + "type": "const git_diff *", + "comment": null + }, + { + "name": "delta_to_add", + "type": "const git_diff_delta *", + "comment": null + }, + { + "name": "matched_pathspec", + "type": "const char *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_diff *diff_so_far, const git_diff_delta *delta_to_add, const char *matched_pathspec, void *payload", + "sig": "const git_diff *::const git_diff_delta *::const char *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Diff notification callback function.

\n", + "comments": "

The callback will be called for each file, just before the git_delta_t gets inserted into the diff.

\n\n

When the callback: - returns < 0, the diff process will be aborted. - returns > 0, the delta will not be inserted into the diff, but the diff process continues. - returns 0, the delta is inserted into the diff, and the diff process continues.

\n" + }, + "git_diff_progress_cb": { + "type": "callback", + "file": "diff.h", + "line": 363, + "lineto": 367, + "args": [ + { + "name": "diff_so_far", + "type": "const git_diff *", + "comment": "The diff being generated." + }, + { + "name": "old_path", + "type": "const char *", + "comment": "The path to the old file or NULL." + }, + { + "name": "new_path", + "type": "const char *", + "comment": "The path to the new file or NULL." + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_diff *diff_so_far, const char *old_path, const char *new_path, void *payload", + "sig": "const git_diff *::const char *::const char *::void *", + "return": { + "type": "int", + "comment": " Non-zero to abort the diff." + }, + "description": "

Diff progress callback.

\n", + "comments": "

Called before each file comparison.

\n" + }, + "git_diff_file_cb": { + "type": "callback", + "file": "diff.h", + "line": 446, + "lineto": 449, + "args": [ + { + "name": "delta", + "type": "const git_diff_delta *", + "comment": "A pointer to the delta data for the file" + }, + { + "name": "progress", + "type": "float", + "comment": "Goes from 0 to 1 over the diff" + }, + { + "name": "payload", + "type": "void *", + "comment": "User-specified pointer from foreach function" + } + ], + "argline": "const git_diff_delta *delta, float progress, void *payload", + "sig": "const git_diff_delta *::float::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

When iterating over a diff, callback that will be made per file.

\n", + "comments": "" + }, + "git_diff_binary_cb": { + "type": "callback", + "file": "diff.h", + "line": 493, + "lineto": 496, + "args": [ + { + "name": "delta", + "type": "const git_diff_delta *", + "comment": null + }, + { + "name": "binary", + "type": "const git_diff_binary *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_diff_delta *delta, const git_diff_binary *binary, void *payload", + "sig": "const git_diff_delta *::const git_diff_binary *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

When iterating over a diff, callback that will be made for\n binary content within the diff.

\n", + "comments": "" + }, + "git_diff_hunk_cb": { + "type": "callback", + "file": "diff.h", + "line": 513, + "lineto": 516, + "args": [ + { + "name": "delta", + "type": "const git_diff_delta *", + "comment": null + }, + { + "name": "hunk", + "type": "const git_diff_hunk *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_diff_delta *delta, const git_diff_hunk *hunk, void *payload", + "sig": "const git_diff_delta *::const git_diff_hunk *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

When iterating over a diff, callback that will be made per hunk.

\n", + "comments": "" + }, + "git_diff_line_cb": { + "type": "callback", + "file": "diff.h", + "line": 566, + "lineto": 570, + "args": [ + { + "name": "delta", + "type": "const git_diff_delta *", + "comment": null + }, + { + "name": "hunk", + "type": "const git_diff_hunk *", + "comment": null + }, + { + "name": "line", + "type": "const git_diff_line *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_diff_delta *delta, const git_diff_hunk *hunk, const git_diff_line *line, void *payload", + "sig": "const git_diff_delta *::const git_diff_hunk *::const git_diff_line *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

When iterating over a diff, callback that will be made per text diff\n line. In this context, the provided range will be NULL.

\n", + "comments": "

When printing a diff, callback that will be made to output each line of text. This uses some extra GIT_DIFF_LINE_... constants for output of lines of file and hunk headers.

\n" + }, + "git_index_matched_path_cb": { + "type": "callback", + "file": "index.h", + "line": 146, + "lineto": 147, + "args": [ + { + "name": "path", + "type": "const char *", + "comment": null + }, + { + "name": "matched_pathspec", + "type": "const char *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const char *path, const char *matched_pathspec, void *payload", + "sig": "const char *::const char *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Callback for APIs that add/remove/update files matching pathspec

\n", + "comments": "" + }, + "git_headlist_cb": { + "type": "callback", + "file": "net.h", + "line": 55, + "lineto": 55, + "args": [ + { + "name": "rhead", + "type": "git_remote_head *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "git_remote_head *rhead, void *payload", + "sig": "git_remote_head *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Callback for listing the remote heads

\n", + "comments": "" + }, + "git_note_foreach_cb": { + "type": "callback", + "file": "notes.h", + "line": 29, + "lineto": 30, + "args": [ + { + "name": "blob_id", + "type": "const git_oid *", + "comment": null + }, + { + "name": "annotated_object_id", + "type": "const git_oid *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_oid *blob_id, const git_oid *annotated_object_id, void *payload", + "sig": "const git_oid *::const git_oid *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Callback for git_note_foreach.

\n", + "comments": "

Receives: - blob_id: Oid of the blob containing the message - annotated_object_id: Oid of the git object being annotated - payload: Payload data passed to git_note_foreach

\n" + }, + "git_odb_foreach_cb": { + "type": "callback", + "file": "odb.h", + "line": 27, + "lineto": 27, + "args": [ + { + "name": "id", + "type": "const git_oid *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_oid *id, void *payload", + "sig": "const git_oid *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Function type for callbacks from git_odb_foreach.

\n", + "comments": "" + }, + "git_packbuilder_progress": { + "type": "callback", + "file": "pack.h", + "line": 210, + "lineto": 214, + "args": [ + { + "name": "stage", + "type": "int", + "comment": null + }, + { + "name": "current", + "type": "unsigned int", + "comment": null + }, + { + "name": "total", + "type": "unsigned int", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "int stage, unsigned int current, unsigned int total, void *payload", + "sig": "int::unsigned int::unsigned int::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Packbuilder progress notification function

\n", + "comments": "" + }, + "git_remote_rename_problem_cb": { + "type": "callback", + "file": "remote.h", + "line": 29, + "lineto": 29, + "args": [ + { + "name": "problematic_refspec", + "type": "const char *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const char *problematic_refspec, void *payload", + "sig": "const char *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

git2/remote.h

\n", + "comments": "

@{

\n" + }, + "git_push_transfer_progress": { + "type": "callback", + "file": "remote.h", + "line": 335, + "lineto": 339, + "args": [ + { + "name": "current", + "type": "unsigned int", + "comment": null + }, + { + "name": "total", + "type": "unsigned int", + "comment": null + }, + { + "name": "bytes", + "type": "size_t", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "unsigned int current, unsigned int total, size_t bytes, void *payload", + "sig": "unsigned int::unsigned int::size_t::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Push network progress notification function

\n", + "comments": "" + }, + "git_push_negotiation": { + "type": "callback", + "file": "remote.h", + "line": 368, + "lineto": 368, + "args": [ + { + "name": "updates", + "type": "const git_push_update **", + "comment": "an array containing the updates which will be sent\n as commands to the destination." + }, + { + "name": "len", + "type": "size_t", + "comment": "number of elements in `updates`" + }, + { + "name": "payload", + "type": "void *", + "comment": "Payload provided by the caller" + } + ], + "argline": "const git_push_update **updates, size_t len, void *payload", + "sig": "const git_push_update **::size_t::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "", + "comments": "" + }, + "git_revwalk_hide_cb": { + "type": "callback", + "file": "revwalk.h", + "line": 279, + "lineto": 281, + "args": [ + { + "name": "commit_id", + "type": "const git_oid *", + "comment": "oid of Commit" + }, + { + "name": "payload", + "type": "void *", + "comment": "User-specified pointer to data to be passed as data payload" + } + ], + "argline": "const git_oid *commit_id, void *payload", + "sig": "const git_oid *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

This is a callback function that user can provide to hide a\n commit and its parents. If the callback function returns non-zero value,\n then this commit and its parents will be hidden.

\n", + "comments": "" + }, + "git_stash_apply_progress_cb": { + "type": "callback", + "file": "stash.h", + "line": 113, + "lineto": 115, + "args": [ + { + "name": "progress", + "type": "git_stash_apply_progress_t", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "git_stash_apply_progress_t progress, void *payload", + "sig": "git_stash_apply_progress_t::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Stash application progress notification function.\n Return 0 to continue processing, or a negative value to\n abort the stash application.

\n", + "comments": "" + }, + "git_stash_cb": { + "type": "callback", + "file": "stash.h", + "line": 198, + "lineto": 202, + "args": [ + { + "name": "index", + "type": "size_t", + "comment": "The position within the stash list. 0 points to the\n most recent stashed state." + }, + { + "name": "message", + "type": "const char *", + "comment": "The stash message." + }, + { + "name": "stash_id", + "type": "const int *", + "comment": "The commit oid of the stashed state." + }, + { + "name": "payload", + "type": "void *", + "comment": "Extra parameter to callback function." + } + ], + "argline": "size_t index, const char *message, const int *stash_id, void *payload", + "sig": "size_t::const char *::const int *::void *", + "return": { + "type": "int", + "comment": " 0 to continue iterating or non-zero to stop." + }, + "description": "

This is a callback function you can provide to iterate over all the\n stashed states that will be invoked per entry.

\n", + "comments": "" + }, + "git_status_cb": { + "type": "callback", + "file": "status.h", + "line": 61, + "lineto": 62, + "args": [ + { + "name": "path", + "type": "const char *", + "comment": null + }, + { + "name": "status_flags", + "type": "unsigned int", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const char *path, unsigned int status_flags, void *payload", + "sig": "const char *::unsigned int::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Function pointer to receive status on individual files

\n", + "comments": "

path is the relative path to the file from the root of the repository.

\n\n

status_flags is a combination of git_status_t values that apply.

\n\n

payload is the value you passed to the foreach function as payload.

\n" + }, + "git_submodule_cb": { + "type": "callback", + "file": "submodule.h", + "line": 118, + "lineto": 119, + "args": [ + { + "name": "sm", + "type": "git_submodule *", + "comment": "git_submodule currently being visited" + }, + { + "name": "name", + "type": "const char *", + "comment": "name of the submodule" + }, + { + "name": "payload", + "type": "void *", + "comment": "value you passed to the foreach function as payload" + } + ], + "argline": "git_submodule *sm, const char *name, void *payload", + "sig": "git_submodule *::const char *::void *", + "return": { + "type": "int", + "comment": " 0 on success or error code" + }, + "description": "

Function pointer to receive each submodule

\n", + "comments": "" + }, + "git_filter_init_fn": { + "type": "callback", + "file": "sys/filter.h", + "line": 141, + "lineto": 141, + "args": [ + { + "name": "self", + "type": "git_filter *", + "comment": null + } + ], + "argline": "git_filter *self", + "sig": "git_filter *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Initialize callback on filter

\n", + "comments": "

Specified as filter.initialize, this is an optional callback invoked before a filter is first used. It will be called once at most.

\n\n

If non-NULL, the filter's initialize callback will be invoked right before the first use of the filter, so you can defer expensive initialization operations (in case libgit2 is being used in a way that doesn't need the filter).

\n" + }, + "git_filter_shutdown_fn": { + "type": "callback", + "file": "sys/filter.h", + "line": 153, + "lineto": 153, + "args": [ + { + "name": "self", + "type": "git_filter *", + "comment": null + } + ], + "argline": "git_filter *self", + "sig": "git_filter *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Shutdown callback on filter

\n", + "comments": "

Specified as filter.shutdown, this is an optional callback invoked when the filter is unregistered or when libgit2 is shutting down. It will be called once at most and should release resources as needed. This may be called even if the initialize callback was not made.

\n\n

Typically this function will free the git_filter object itself.

\n" + }, + "git_filter_check_fn": { + "type": "callback", + "file": "sys/filter.h", + "line": 175, + "lineto": 179, + "args": [ + { + "name": "self", + "type": "git_filter *", + "comment": null + }, + { + "name": "payload", + "type": "void **", + "comment": null + }, + { + "name": "src", + "type": "const git_filter_source *", + "comment": null + }, + { + "name": "attr_values", + "type": "const char **", + "comment": null + } + ], + "argline": "git_filter *self, void **payload, const git_filter_source *src, const char **attr_values", + "sig": "git_filter *::void **::const git_filter_source *::const char **", + "return": { + "type": "int", + "comment": null + }, + "description": "

Callback to decide if a given source needs this filter

\n", + "comments": "

Specified as filter.check, this is an optional callback that checks if filtering is needed for a given source.

\n\n

It should return 0 if the filter should be applied (i.e. success), GIT_PASSTHROUGH if the filter should not be applied, or an error code to fail out of the filter processing pipeline and return to the caller.

\n\n

The attr_values will be set to the values of any attributes given in the filter definition. See git_filter below for more detail.

\n\n

The payload will be a pointer to a reference payload for the filter. This will start as NULL, but check can assign to this pointer for later use by the apply callback. Note that the value should be heap allocated (not stack), so that it doesn't go away before the apply callback can use it. If a filter allocates and assigns a value to the payload, it will need a cleanup callback to free the payload.

\n" + }, + "git_filter_apply_fn": { + "type": "callback", + "file": "sys/filter.h", + "line": 193, + "lineto": 198, + "args": [ + { + "name": "self", + "type": "git_filter *", + "comment": null + }, + { + "name": "payload", + "type": "void **", + "comment": null + }, + { + "name": "to", + "type": "git_buf *", + "comment": null + }, + { + "name": "from", + "type": "const git_buf *", + "comment": null + }, + { + "name": "src", + "type": "const git_filter_source *", + "comment": null + } + ], + "argline": "git_filter *self, void **payload, git_buf *to, const git_buf *from, const git_filter_source *src", + "sig": "git_filter *::void **::git_buf *::const git_buf *::const git_filter_source *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Callback to actually perform the data filtering

\n", + "comments": "

Specified as filter.apply, this is the callback that actually filters data. If it successfully writes the output, it should return 0. Like check, it can return GIT_PASSTHROUGH to indicate that the filter doesn't want to run. Other error codes will stop filter processing and return to the caller.

\n\n

The payload value will refer to any payload that was set by the check callback. It may be read from or written to as needed.

\n" + }, + "git_filter_cleanup_fn": { + "type": "callback", + "file": "sys/filter.h", + "line": 215, + "lineto": 217, + "args": [ + { + "name": "self", + "type": "git_filter *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "git_filter *self, void *payload", + "sig": "git_filter *::void *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Callback to clean up after filtering has been applied

\n", + "comments": "

Specified as filter.cleanup, this is an optional callback invoked after the filter has been applied. If the check or apply callbacks allocated a payload to keep per-source filter state, use this callback to free that payload and release resources as required.

\n" + }, + "git_merge_driver_init_fn": { + "type": "callback", + "file": "sys/merge.h", + "line": 71, + "lineto": 71, + "args": [ + { + "name": "self", + "type": "int *", + "comment": null + } + ], + "argline": "int *self", + "sig": "int *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Initialize callback on merge driver

\n", + "comments": "

Specified as driver.initialize, this is an optional callback invoked before a merge driver is first used. It will be called once at most per library lifetime.

\n\n

If non-NULL, the merge driver's initialize callback will be invoked right before the first use of the driver, so you can defer expensive initialization operations (in case libgit2 is being used in a way that doesn't need the merge driver).

\n" + }, + "git_merge_driver_shutdown_fn": { + "type": "callback", + "file": "sys/merge.h", + "line": 83, + "lineto": 83, + "args": [ + { + "name": "self", + "type": "int *", + "comment": null + } + ], + "argline": "int *self", + "sig": "int *", + "return": { + "type": "void", + "comment": null + }, + "description": "

Shutdown callback on merge driver

\n", + "comments": "

Specified as driver.shutdown, this is an optional callback invoked when the merge driver is unregistered or when libgit2 is shutting down. It will be called once at most and should release resources as needed. This may be called even if the initialize callback was not made.

\n\n

Typically this function will free the git_merge_driver object itself.

\n" + }, + "git_merge_driver_apply_fn": { + "type": "callback", + "file": "sys/merge.h", + "line": 103, + "lineto": 109, + "args": [ + { + "name": "self", + "type": "int *", + "comment": null + }, + { + "name": "path_out", + "type": "const char **", + "comment": null + }, + { + "name": "mode_out", + "type": "int *", + "comment": null + }, + { + "name": "merged_out", + "type": "int *", + "comment": null + }, + { + "name": "filter_name", + "type": "const char *", + "comment": null + }, + { + "name": "src", + "type": "const git_merge_driver_source *", + "comment": null + } + ], + "argline": "int *self, const char **path_out, int *mode_out, int *merged_out, const char *filter_name, const git_merge_driver_source *src", + "sig": "int *::const char **::int *::int *::const char *::const git_merge_driver_source *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Callback to perform the merge.

\n", + "comments": "

Specified as driver.apply, this is the callback that actually does the merge. If it can successfully perform a merge, it should populate path_out with a pointer to the filename to accept, mode_out with the resultant mode, and merged_out with the buffer of the merged file and then return 0. If the driver returns GIT_PASSTHROUGH, then the default merge driver should instead be run. It can also return GIT_EMERGECONFLICT if the driver is not able to produce a merge result, and the file will remain conflicted. Any other errors will fail and return to the caller.

\n\n

The filter_name contains the name of the filter that was invoked, as specified by the file's attributes.

\n\n

The src contains the data about the file to be merged.

\n" + }, + "git_trace_callback": { + "type": "callback", + "file": "trace.h", + "line": 52, + "lineto": 52, + "args": [ + { + "name": "level", + "type": "git_trace_level_t", + "comment": null + }, + { + "name": "msg", + "type": "const char *", + "comment": null + } + ], + "argline": "git_trace_level_t level, const char *msg", + "sig": "git_trace_level_t::const char *", + "return": { + "type": "void", + "comment": null + }, + "description": "

An instance for a tracing function

\n", + "comments": "" + }, + "git_transport_cb": { + "type": "callback", + "file": "transport.h", + "line": 24, + "lineto": 24, + "args": [ + { + "name": "out", + "type": "git_transport **", + "comment": null + }, + { + "name": "owner", + "type": "git_remote *", + "comment": null + }, + { + "name": "param", + "type": "void *", + "comment": null + } + ], + "argline": "git_transport **out, git_remote *owner, void *param", + "sig": "git_transport **::git_remote *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Signature of a function which creates a transport

\n", + "comments": "" + }, + "git_cred_acquire_cb": { + "type": "callback", + "file": "transport.h", + "line": 333, + "lineto": 338, + "args": [ + { + "name": "cred", + "type": "git_cred **", + "comment": null + }, + { + "name": "url", + "type": "const char *", + "comment": null + }, + { + "name": "username_from_url", + "type": "const char *", + "comment": null + }, + { + "name": "allowed_types", + "type": "unsigned int", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "git_cred **cred, const char *url, const char *username_from_url, unsigned int allowed_types, void *payload", + "sig": "git_cred **::const char *::const char *::unsigned int::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Signature of a function which acquires a credential object.

\n", + "comments": "
    \n
  • cred: The newly created credential object. - url: The resource for which we are demanding a credential. - username_from_url: The username that was embedded in a "user@host" remote url, or NULL if not included. - allowed_types: A bitmask stating which cred types are OK to return. - payload: The payload provided when specifying this callback. - returns 0 for success, < 0 to indicate an error, > 0 to indicate no credential was acquired
  • \n
\n" + }, + "git_treebuilder_filter_cb": { + "type": "callback", + "file": "tree.h", + "line": 346, + "lineto": 347, + "args": [ + { + "name": "entry", + "type": "const git_tree_entry *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const git_tree_entry *entry, void *payload", + "sig": "const git_tree_entry *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Callback for git_treebuilder_filter

\n", + "comments": "

The return value is treated as a boolean, with zero indicating that the entry should be left alone and any non-zero value meaning that the entry should be removed from the treebuilder list (i.e. filtered out).

\n" + }, + "git_treewalk_cb": { + "type": "callback", + "file": "tree.h", + "line": 380, + "lineto": 381, + "args": [ + { + "name": "root", + "type": "const char *", + "comment": null + }, + { + "name": "entry", + "type": "const git_tree_entry *", + "comment": null + }, + { + "name": "payload", + "type": "void *", + "comment": null + } + ], + "argline": "const char *root, const git_tree_entry *entry, void *payload", + "sig": "const char *::const git_tree_entry *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Callback for the tree traversal method

\n", + "comments": "" + }, + "git_transfer_progress_cb": { + "type": "callback", + "file": "types.h", + "line": 270, + "lineto": 270, + "args": [ + { + "name": "stats", + "type": "const git_transfer_progress *", + "comment": "Structure containing information about the state of the transfer" + }, + { + "name": "payload", + "type": "void *", + "comment": "Payload provided by caller" + } + ], + "argline": "const git_transfer_progress *stats, void *payload", + "sig": "const git_transfer_progress *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Type for progress callbacks during indexing. Return a value less than zero\n to cancel the transfer.

\n", + "comments": "" + }, + "git_transport_message_cb": { + "type": "callback", + "file": "types.h", + "line": 280, + "lineto": 280, + "args": [ + { + "name": "str", + "type": "const char *", + "comment": "The message from the transport" + }, + { + "name": "len", + "type": "int", + "comment": "The length of the message" + }, + { + "name": "payload", + "type": "void *", + "comment": "Payload provided by the caller" + } + ], + "argline": "const char *str, int len, void *payload", + "sig": "const char *::int::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Type for messages delivered by the transport. Return a negative value\n to cancel the network operation.

\n", + "comments": "" + }, + "git_transport_certificate_check_cb": { + "type": "callback", + "file": "types.h", + "line": 330, + "lineto": 330, + "args": [ + { + "name": "cert", + "type": "git_cert *", + "comment": "The host certificate" + }, + { + "name": "valid", + "type": "int", + "comment": "Whether the libgit2 checks (OpenSSL or WinHTTP) think\n this certificate is valid" + }, + { + "name": "host", + "type": "const char *", + "comment": "Hostname of the host libgit2 connected to" + }, + { + "name": "payload", + "type": "void *", + "comment": "Payload provided by the caller" + } + ], + "argline": "git_cert *cert, int valid, const char *host, void *payload", + "sig": "git_cert *::int::const char *::void *", + "return": { + "type": "int", + "comment": null + }, + "description": "

Callback for the user's custom certificate checks.

\n", + "comments": "" + } + }, + "globals": {}, + "types": [ + [ + "git_annotated_commit", + { + "decl": "git_annotated_commit", + "type": "struct", + "value": "git_annotated_commit", + "file": "types.h", + "line": 178, + "lineto": 178, + "tdef": "typedef", + "description": " Annotated commits, the input to merge and rebase. ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_annotated_commit_free", + "git_annotated_commit_from_fetchhead", + "git_annotated_commit_from_ref", + "git_annotated_commit_from_revspec", + "git_annotated_commit_id", + "git_annotated_commit_lookup", + "git_branch_create_from_annotated", + "git_merge", + "git_merge_analysis", + "git_rebase_init", + "git_repository_set_head_detached_from_annotated", + "git_reset_from_annotated" + ] + } + } + ], + [ + "git_attr_t", + { + "decl": [ + "GIT_ATTR_UNSPECIFIED_T", + "GIT_ATTR_TRUE_T", + "GIT_ATTR_FALSE_T", + "GIT_ATTR_VALUE_T" + ], + "type": "enum", + "file": "attr.h", + "line": 82, + "lineto": 87, + "block": "GIT_ATTR_UNSPECIFIED_T\nGIT_ATTR_TRUE_T\nGIT_ATTR_FALSE_T\nGIT_ATTR_VALUE_T", + "tdef": "typedef", + "description": " Possible states for an attribute", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_ATTR_UNSPECIFIED_T", + "comments": "

The attribute has been left unspecified

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_ATTR_TRUE_T", + "comments": "

The attribute has been set

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_ATTR_FALSE_T", + "comments": "

The attribute has been unset

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_ATTR_VALUE_T", + "comments": "

This attribute has a value

\n", + "value": 3 + } + ], + "used": { + "returns": [ + "git_attr_value" + ], + "needs": [] + } + } + ], + [ + "git_blame_flag_t", + { + "decl": [ + "GIT_BLAME_NORMAL", + "GIT_BLAME_TRACK_COPIES_SAME_FILE", + "GIT_BLAME_TRACK_COPIES_SAME_COMMIT_MOVES", + "GIT_BLAME_TRACK_COPIES_SAME_COMMIT_COPIES", + "GIT_BLAME_TRACK_COPIES_ANY_COMMIT_COPIES", + "GIT_BLAME_FIRST_PARENT" + ], + "type": "enum", + "file": "blame.h", + "line": 26, + "lineto": 46, + "block": "GIT_BLAME_NORMAL\nGIT_BLAME_TRACK_COPIES_SAME_FILE\nGIT_BLAME_TRACK_COPIES_SAME_COMMIT_MOVES\nGIT_BLAME_TRACK_COPIES_SAME_COMMIT_COPIES\nGIT_BLAME_TRACK_COPIES_ANY_COMMIT_COPIES\nGIT_BLAME_FIRST_PARENT", + "tdef": "typedef", + "description": " Flags for indicating option behavior for git_blame APIs.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_BLAME_NORMAL", + "comments": "

Normal blame, the default

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_BLAME_TRACK_COPIES_SAME_FILE", + "comments": "

Track lines that have moved within a file (like git blame -M).\n NOT IMPLEMENTED.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_BLAME_TRACK_COPIES_SAME_COMMIT_MOVES", + "comments": "

Track lines that have moved across files in the same commit (like git blame -C).\n NOT IMPLEMENTED.

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_BLAME_TRACK_COPIES_SAME_COMMIT_COPIES", + "comments": "

Track lines that have been copied from another file that exists in the\n same commit (like git blame -CC). Implies SAME_FILE.\n NOT IMPLEMENTED.

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_BLAME_TRACK_COPIES_ANY_COMMIT_COPIES", + "comments": "

Track lines that have been copied from another file that exists in any\n commit (like git blame -CCC). Implies SAME_COMMIT_COPIES.\n NOT IMPLEMENTED.

\n", + "value": 8 + }, + { + "type": "int", + "name": "GIT_BLAME_FIRST_PARENT", + "comments": "

Restrict the search of commits to those reachable following only the\n first parents.

\n", + "value": 16 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_blame_hunk", + { + "decl": [ + "size_t lines_in_hunk", + "git_oid final_commit_id", + "size_t final_start_line_number", + "git_signature * final_signature", + "git_oid orig_commit_id", + "const char * orig_path", + "size_t orig_start_line_number", + "git_signature * orig_signature", + "char boundary" + ], + "type": "struct", + "value": "git_blame_hunk", + "file": "blame.h", + "line": 115, + "lineto": 128, + "block": "size_t lines_in_hunk\ngit_oid final_commit_id\nsize_t final_start_line_number\ngit_signature * final_signature\ngit_oid orig_commit_id\nconst char * orig_path\nsize_t orig_start_line_number\ngit_signature * orig_signature\nchar boundary", + "tdef": "typedef", + "description": " Structure that represents a blame hunk.", + "comments": "
    \n
  • lines_in_hunk is the number of lines in this hunk - final_commit_id is the OID of the commit where this line was last changed. - final_start_line_number is the 1-based line number where this hunk begins, in the final version of the file - orig_commit_id is the OID of the commit where this hunk was found. This will usually be the same as final_commit_id, except when GIT_BLAME_TRACK_COPIES_ANY_COMMIT_COPIES has been specified. - orig_path is the path to the file where this hunk originated, as of the commit specified by orig_commit_id. - orig_start_line_number is the 1-based line number where this hunk begins in the file named by orig_path in the commit specified by orig_commit_id. - boundary is 1 iff the hunk has been tracked to a boundary commit (the root, or the commit specified in git_blame_options.oldest_commit)
  • \n
\n", + "fields": [ + { + "type": "size_t", + "name": "lines_in_hunk", + "comments": "" + }, + { + "type": "git_oid", + "name": "final_commit_id", + "comments": "" + }, + { + "type": "size_t", + "name": "final_start_line_number", + "comments": "" + }, + { + "type": "git_signature *", + "name": "final_signature", + "comments": "" + }, + { + "type": "git_oid", + "name": "orig_commit_id", + "comments": "" + }, + { + "type": "const char *", + "name": "orig_path", + "comments": "" + }, + { + "type": "size_t", + "name": "orig_start_line_number", + "comments": "" + }, + { + "type": "git_signature *", + "name": "orig_signature", + "comments": "" + }, + { + "type": "char", + "name": "boundary", + "comments": "" + } + ], + "used": { + "returns": [ + "git_blame_get_hunk_byindex", + "git_blame_get_hunk_byline" + ], + "needs": [] + } + } + ], + [ + "git_blame_options", + { + "decl": [ + "unsigned int version", + "uint32_t flags", + "uint16_t min_match_characters", + "git_oid newest_commit", + "git_oid oldest_commit", + "size_t min_line", + "size_t max_line" + ], + "type": "struct", + "value": "git_blame_options", + "file": "blame.h", + "line": 70, + "lineto": 79, + "block": "unsigned int version\nuint32_t flags\nuint16_t min_match_characters\ngit_oid newest_commit\ngit_oid oldest_commit\nsize_t min_line\nsize_t max_line", + "tdef": "typedef", + "description": " Blame options structure", + "comments": "

Use zeros to indicate default settings. It's easiest to use the GIT_BLAME_OPTIONS_INIT macro: git_blame_options opts = GIT_BLAME_OPTIONS_INIT;

\n\n
    \n
  • flags is a combination of the git_blame_flag_t values above. - min_match_characters is the lower bound on the number of alphanumeric characters that must be detected as moving/copying within a file for it to associate those lines with the parent commit. The default value is 20. This value only takes effect if any of the GIT_BLAME_TRACK_COPIES_* flags are specified. - newest_commit is the id of the newest commit to consider. The default is HEAD. - oldest_commit is the id of the oldest commit to consider. The default is the first commit encountered with a NULL parent. - min_line is the first line in the file to blame. The default is 1 (line numbers start with 1). - max_line is the last line in the file to blame. The default is the last line of the file.
  • \n
\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "uint32_t", + "name": "flags", + "comments": "" + }, + { + "type": "uint16_t", + "name": "min_match_characters", + "comments": "" + }, + { + "type": "git_oid", + "name": "newest_commit", + "comments": "" + }, + { + "type": "git_oid", + "name": "oldest_commit", + "comments": "" + }, + { + "type": "size_t", + "name": "min_line", + "comments": "" + }, + { + "type": "size_t", + "name": "max_line", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_blame_file", + "git_blame_init_options" + ] + } + } + ], + [ + "git_blob", + { + "decl": "git_blob", + "type": "struct", + "value": "git_blob", + "file": "types.h", + "line": 117, + "lineto": 117, + "tdef": "typedef", + "description": " In-memory representation of a blob object. ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_blob_create_fromchunks", + "git_blob_dup", + "git_blob_filtered_content", + "git_blob_free", + "git_blob_id", + "git_blob_is_binary", + "git_blob_lookup", + "git_blob_lookup_prefix", + "git_blob_owner", + "git_blob_rawcontent", + "git_blob_rawsize", + "git_diff_blob_to_buffer", + "git_diff_blobs", + "git_filter_list_apply_to_blob", + "git_filter_list_load", + "git_filter_list_stream_blob", + "git_patch_from_blob_and_buffer", + "git_patch_from_blobs" + ] + } + } + ], + [ + "git_branch_iterator", + { + "decl": "git_branch_iterator", + "type": "struct", + "value": "git_branch_iterator", + "file": "branch.h", + "line": 88, + "lineto": 88, + "tdef": "typedef", + "description": " Iterator type for branches ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_branch_iterator_free", + "git_branch_iterator_new", + "git_branch_next" + ] + } + } + ], + [ + "git_branch_t", + { + "decl": [ + "GIT_BRANCH_LOCAL", + "GIT_BRANCH_REMOTE", + "GIT_BRANCH_ALL" + ], + "type": "enum", + "file": "types.h", + "line": 198, + "lineto": 202, + "block": 
"GIT_BRANCH_LOCAL\nGIT_BRANCH_REMOTE\nGIT_BRANCH_ALL", + "tdef": "typedef", + "description": " Basic type of any Git branch. ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_BRANCH_LOCAL", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_BRANCH_REMOTE", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_BRANCH_ALL", + "comments": "", + "value": 3 + } + ], + "used": { + "returns": [], + "needs": [ + "git_branch_iterator_new", + "git_branch_lookup", + "git_branch_next" + ] + } + } + ], + [ + "git_buf", + { + "decl": [ + "char * ptr", + "size_t asize", + "size_t size" + ], + "type": "struct", + "value": "git_buf", + "file": "buffer.h", + "line": 52, + "lineto": 55, + "block": "char * ptr\nsize_t asize\nsize_t size", + "tdef": "typedef", + "description": " A data buffer for exporting data from libgit2", + "comments": "

Sometimes libgit2 wants to return an allocated data buffer to the caller and have the caller take responsibility for freeing that memory. This can be awkward if the caller does not have easy access to the same allocation functions that libgit2 is using. In those cases, libgit2 will fill in a git_buf and the caller can use git_buf_free() to release it when they are done.

\n\n

A git_buf may also be used for the caller to pass in a reference to a block of memory they hold. In this case, libgit2 will not resize or free the memory, but will read from it as needed.

\n\n

A git_buf is a public structure with three fields:

\n\n
    \n
  • ptr points to the start of the allocated memory. If it is NULL, then the git_buf is considered empty and libgit2 will feel free to overwrite it with new data.

  • \n
  • size holds the size (in bytes) of the data that is actually used.

  • \n
  • asize holds the known total amount of allocated memory if the ptr was allocated by libgit2. It may be larger than size. If ptr was not allocated by libgit2 and should not be resized and/or freed, then asize will be set to zero.

  • \n
\n\n

Some APIs may occasionally do something slightly unusual with a buffer, such as setting ptr to a value that was passed in by the user. In those cases, the behavior will be clearly documented by the API.

\n", + "fields": [ + { + "type": "char *", + "name": "ptr", + "comments": "" + }, + { + "type": "size_t", + "name": "asize", + "comments": "" + }, + { + "type": "size_t", + "name": "size", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_blob_filtered_content", + "git_buf_contains_nul", + "git_buf_free", + "git_buf_grow", + "git_buf_is_binary", + "git_buf_set", + "git_commit_create_buffer", + "git_commit_extract_signature", + "git_commit_header_field", + "git_config_find_global", + "git_config_find_programdata", + "git_config_find_system", + "git_config_find_xdg", + "git_config_get_path", + "git_config_get_string_buf", + "git_config_parse_path", + "git_describe_format", + "git_diff_commit_as_email", + "git_diff_format_email", + "git_diff_stats_to_buf", + "git_filter_apply_fn", + "git_filter_list_apply_to_blob", + "git_filter_list_apply_to_data", + "git_filter_list_apply_to_file", + "git_filter_list_stream_data", + "git_message_prettify", + "git_object_short_id", + "git_patch_to_buf", + "git_refspec_rtransform", + "git_refspec_transform", + "git_remote_default_branch", + "git_repository_discover", + "git_repository_message", + "git_submodule_resolve_url" + ] + } + } + ], + [ + "git_cert", + { + "decl": [ + "git_cert_t cert_type" + ], + "type": "struct", + "value": "git_cert", + "file": "types.h", + "line": 314, + "lineto": 319, + "block": "git_cert_t cert_type", + "tdef": "typedef", + "description": " Parent type for `git_cert_hostkey` and `git_cert_x509`.", + "comments": "", + "fields": [ + { + "type": "git_cert_t", + "name": "cert_type", + "comments": " Type of certificate. A `GIT_CERT_` value." 
+ } + ], + "used": { + "returns": [], + "needs": [ + "git_transport_certificate_check_cb", + "git_transport_smart_certificate_check" + ] + } + } + ], + [ + "git_cert_hostkey", + { + "decl": [ + "git_cert parent", + "git_cert_ssh_t type", + "unsigned char [16] hash_md5", + "unsigned char [20] hash_sha1" + ], + "type": "struct", + "value": "git_cert_hostkey", + "file": "transport.h", + "line": 39, + "lineto": 59, + "block": "git_cert parent\ngit_cert_ssh_t type\nunsigned char [16] hash_md5\nunsigned char [20] hash_sha1", + "tdef": "typedef", + "description": " Hostkey information taken from libssh2", + "comments": "", + "fields": [ + { + "type": "git_cert", + "name": "parent", + "comments": "" + }, + { + "type": "git_cert_ssh_t", + "name": "type", + "comments": " A hostkey type from libssh2, either\n `GIT_CERT_SSH_MD5` or `GIT_CERT_SSH_SHA1`" + }, + { + "type": "unsigned char [16]", + "name": "hash_md5", + "comments": " Hostkey hash. If type has `GIT_CERT_SSH_MD5` set, this will\n have the MD5 hash of the hostkey." + }, + { + "type": "unsigned char [20]", + "name": "hash_sha1", + "comments": " Hostkey hash. If type has `GIT_CERT_SSH_SHA1` set, this will\n have the SHA-1 hash of the hostkey." + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_cert_ssh_t", + { + "decl": [ + "GIT_CERT_SSH_MD5", + "GIT_CERT_SSH_SHA1" + ], + "type": "enum", + "file": "transport.h", + "line": 29, + "lineto": 34, + "block": "GIT_CERT_SSH_MD5\nGIT_CERT_SSH_SHA1", + "tdef": "typedef", + "description": " Type of SSH host fingerprint", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_CERT_SSH_MD5", + "comments": "

MD5 is available

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_CERT_SSH_SHA1", + "comments": "

SHA-1 is available

\n", + "value": 2 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_cert_t", + { + "decl": [ + "GIT_CERT_NONE", + "GIT_CERT_X509", + "GIT_CERT_HOSTKEY_LIBSSH2", + "GIT_CERT_STRARRAY" + ], + "type": "enum", + "file": "types.h", + "line": 286, + "lineto": 309, + "block": "GIT_CERT_NONE\nGIT_CERT_X509\nGIT_CERT_HOSTKEY_LIBSSH2\nGIT_CERT_STRARRAY\nGIT_CERT_NONE\nGIT_CERT_X509\nGIT_CERT_HOSTKEY_LIBSSH2\nGIT_CERT_STRARRAY", + "tdef": "typedef", + "description": " Type of host certificate structure that is passed to the check callback", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_CERT_NONE", + "comments": "

No information about the certificate is available. This may\n happen when using curl.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_CERT_X509", + "comments": "

The data argument to the callback will be a pointer to\n the DER-encoded data.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_CERT_HOSTKEY_LIBSSH2", + "comments": "

The data argument to the callback will be a pointer to a\n git_cert_hostkey structure.

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_CERT_STRARRAY", + "comments": "

The data argument to the callback will be a pointer to a\n git_strarray with name:content strings containing\n information about the certificate. This is used when using\n curl.

\n", + "value": 3 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_cert_x509", + { + "decl": [ + "git_cert parent", + "void * data", + "size_t len" + ], + "type": "struct", + "value": "git_cert_x509", + "file": "transport.h", + "line": 64, + "lineto": 74, + "block": "git_cert parent\nvoid * data\nsize_t len", + "tdef": "typedef", + "description": " X.509 certificate information", + "comments": "", + "fields": [ + { + "type": "git_cert", + "name": "parent", + "comments": "" + }, + { + "type": "void *", + "name": "data", + "comments": " Pointer to the X.509 certificate data" + }, + { + "type": "size_t", + "name": "len", + "comments": " Length of the memory block pointed to by `data`." + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_checkout_notify_t", + { + "decl": [ + "GIT_CHECKOUT_NOTIFY_NONE", + "GIT_CHECKOUT_NOTIFY_CONFLICT", + "GIT_CHECKOUT_NOTIFY_DIRTY", + "GIT_CHECKOUT_NOTIFY_UPDATED", + "GIT_CHECKOUT_NOTIFY_UNTRACKED", + "GIT_CHECKOUT_NOTIFY_IGNORED", + "GIT_CHECKOUT_NOTIFY_ALL" + ], + "type": "enum", + "file": "checkout.h", + "line": 205, + "lineto": 214, + "block": "GIT_CHECKOUT_NOTIFY_NONE\nGIT_CHECKOUT_NOTIFY_CONFLICT\nGIT_CHECKOUT_NOTIFY_DIRTY\nGIT_CHECKOUT_NOTIFY_UPDATED\nGIT_CHECKOUT_NOTIFY_UNTRACKED\nGIT_CHECKOUT_NOTIFY_IGNORED\nGIT_CHECKOUT_NOTIFY_ALL", + "tdef": "typedef", + "description": " Checkout notification flags", + "comments": "

Checkout will invoke an options notification callback (notify_cb) for certain cases - you pick which ones via notify_flags:

\n\n
    \n
  • GIT_CHECKOUT_NOTIFY_CONFLICT invokes checkout on conflicting paths.

  • \n
  • GIT_CHECKOUT_NOTIFY_DIRTY notifies about "dirty" files, i.e. those that do not need an update but no longer match the baseline. Core git displays these files when checkout runs, but won't stop the checkout.

  • \n
  • GIT_CHECKOUT_NOTIFY_UPDATED sends notification for any file changed.

  • \n
  • GIT_CHECKOUT_NOTIFY_UNTRACKED notifies about untracked files.

  • \n
  • GIT_CHECKOUT_NOTIFY_IGNORED notifies about ignored files.

  • \n
\n\n

Returning a non-zero value from this callback will cancel the checkout. The non-zero return value will be propagated back and returned by the git_checkout_... call.

\n\n

Notification callbacks are made prior to modifying any files on disk, so canceling on any notification will still happen prior to any files being modified.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_CHECKOUT_NOTIFY_NONE", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_NOTIFY_CONFLICT", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_NOTIFY_DIRTY", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_NOTIFY_UPDATED", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_NOTIFY_UNTRACKED", + "comments": "", + "value": 8 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_NOTIFY_IGNORED", + "comments": "", + "value": 16 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_NOTIFY_ALL", + "comments": "", + "value": 65535 + } + ], + "used": { + "returns": [], + "needs": [ + "git_checkout_notify_cb" + ] + } + } + ], + [ + "git_checkout_options", + { + "decl": [ + "unsigned int version", + "unsigned int checkout_strategy", + "int disable_filters", + "unsigned int dir_mode", + "unsigned int file_mode", + "int file_open_flags", + "unsigned int notify_flags", + "git_checkout_notify_cb notify_cb", + "void * notify_payload", + "git_checkout_progress_cb progress_cb", + "void * progress_payload", + "git_strarray paths", + "git_tree * baseline", + "git_index * baseline_index", + "const char * target_directory", + "const char * ancestor_label", + "const char * our_label", + "const char * their_label", + "git_checkout_perfdata_cb perfdata_cb", + "void * perfdata_payload" + ], + "type": "struct", + "value": "git_checkout_options", + "file": "checkout.h", + "line": 251, + "lineto": 295, + "block": "unsigned int version\nunsigned int checkout_strategy\nint disable_filters\nunsigned int dir_mode\nunsigned int file_mode\nint file_open_flags\nunsigned int notify_flags\ngit_checkout_notify_cb notify_cb\nvoid * notify_payload\ngit_checkout_progress_cb progress_cb\nvoid * progress_payload\ngit_strarray paths\ngit_tree * baseline\ngit_index * baseline_index\nconst char * target_directory\nconst char * 
ancestor_label\nconst char * our_label\nconst char * their_label\ngit_checkout_perfdata_cb perfdata_cb\nvoid * perfdata_payload", + "tdef": "typedef", + "description": " Checkout options structure", + "comments": "

Zero out for defaults. Initialize with GIT_CHECKOUT_OPTIONS_INIT macro to correctly set the version field. E.g.

\n\n
    git_checkout_options opts = GIT_CHECKOUT_OPTIONS_INIT;\n
\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "unsigned int", + "name": "checkout_strategy", + "comments": " default will be a dry run " + }, + { + "type": "int", + "name": "disable_filters", + "comments": " don't apply filters like CRLF conversion " + }, + { + "type": "unsigned int", + "name": "dir_mode", + "comments": " default is 0755 " + }, + { + "type": "unsigned int", + "name": "file_mode", + "comments": " default is 0644 or 0755 as dictated by blob " + }, + { + "type": "int", + "name": "file_open_flags", + "comments": " default is O_CREAT | O_TRUNC | O_WRONLY " + }, + { + "type": "unsigned int", + "name": "notify_flags", + "comments": " see `git_checkout_notify_t` above " + }, + { + "type": "git_checkout_notify_cb", + "name": "notify_cb", + "comments": "" + }, + { + "type": "void *", + "name": "notify_payload", + "comments": "" + }, + { + "type": "git_checkout_progress_cb", + "name": "progress_cb", + "comments": " Optional callback to notify the consumer of checkout progress. " + }, + { + "type": "void *", + "name": "progress_payload", + "comments": "" + }, + { + "type": "git_strarray", + "name": "paths", + "comments": " When not zeroed out, array of fnmatch patterns specifying which\n paths should be taken into account, otherwise all files. Use\n GIT_CHECKOUT_DISABLE_PATHSPEC_MATCH to treat as simple list." + }, + { + "type": "git_tree *", + "name": "baseline", + "comments": " The expected content of the working directory; defaults to HEAD.\n If the working directory does not match this baseline information,\n that will produce a checkout conflict." + }, + { + "type": "git_index *", + "name": "baseline_index", + "comments": " expected content of workdir, expressed as an index. 
" + }, + { + "type": "const char *", + "name": "target_directory", + "comments": " alternative checkout path to workdir " + }, + { + "type": "const char *", + "name": "ancestor_label", + "comments": " the name of the common ancestor side of conflicts " + }, + { + "type": "const char *", + "name": "our_label", + "comments": " the name of the \"our\" side of conflicts " + }, + { + "type": "const char *", + "name": "their_label", + "comments": " the name of the \"their\" side of conflicts " + }, + { + "type": "git_checkout_perfdata_cb", + "name": "perfdata_cb", + "comments": " Optional callback to notify the consumer of performance data. " + }, + { + "type": "void *", + "name": "perfdata_payload", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_checkout_head", + "git_checkout_index", + "git_checkout_init_options", + "git_checkout_tree", + "git_merge", + "git_reset", + "git_reset_from_annotated" + ] + } + } + ], + [ + "git_checkout_strategy_t", + { + "decl": [ + "GIT_CHECKOUT_NONE", + "GIT_CHECKOUT_SAFE", + "GIT_CHECKOUT_FORCE", + "GIT_CHECKOUT_RECREATE_MISSING", + "GIT_CHECKOUT_ALLOW_CONFLICTS", + "GIT_CHECKOUT_REMOVE_UNTRACKED", + "GIT_CHECKOUT_REMOVE_IGNORED", + "GIT_CHECKOUT_UPDATE_ONLY", + "GIT_CHECKOUT_DONT_UPDATE_INDEX", + "GIT_CHECKOUT_NO_REFRESH", + "GIT_CHECKOUT_SKIP_UNMERGED", + "GIT_CHECKOUT_USE_OURS", + "GIT_CHECKOUT_USE_THEIRS", + "GIT_CHECKOUT_DISABLE_PATHSPEC_MATCH", + "GIT_CHECKOUT_SKIP_LOCKED_DIRECTORIES", + "GIT_CHECKOUT_DONT_OVERWRITE_IGNORED", + "GIT_CHECKOUT_CONFLICT_STYLE_MERGE", + "GIT_CHECKOUT_CONFLICT_STYLE_DIFF3", + "GIT_CHECKOUT_DONT_REMOVE_EXISTING", + "GIT_CHECKOUT_DONT_WRITE_INDEX", + "GIT_CHECKOUT_UPDATE_SUBMODULES", + "GIT_CHECKOUT_UPDATE_SUBMODULES_IF_CHANGED" + ], + "type": "enum", + "file": "checkout.h", + "line": 106, + "lineto": 177, + "block": 
"GIT_CHECKOUT_NONE\nGIT_CHECKOUT_SAFE\nGIT_CHECKOUT_FORCE\nGIT_CHECKOUT_RECREATE_MISSING\nGIT_CHECKOUT_ALLOW_CONFLICTS\nGIT_CHECKOUT_REMOVE_UNTRACKED\nGIT_CHECKOUT_REMOVE_IGNORED\nGIT_CHECKOUT_UPDATE_ONLY\nGIT_CHECKOUT_DONT_UPDATE_INDEX\nGIT_CHECKOUT_NO_REFRESH\nGIT_CHECKOUT_SKIP_UNMERGED\nGIT_CHECKOUT_USE_OURS\nGIT_CHECKOUT_USE_THEIRS\nGIT_CHECKOUT_DISABLE_PATHSPEC_MATCH\nGIT_CHECKOUT_SKIP_LOCKED_DIRECTORIES\nGIT_CHECKOUT_DONT_OVERWRITE_IGNORED\nGIT_CHECKOUT_CONFLICT_STYLE_MERGE\nGIT_CHECKOUT_CONFLICT_STYLE_DIFF3\nGIT_CHECKOUT_DONT_REMOVE_EXISTING\nGIT_CHECKOUT_DONT_WRITE_INDEX\nGIT_CHECKOUT_UPDATE_SUBMODULES\nGIT_CHECKOUT_UPDATE_SUBMODULES_IF_CHANGED", + "tdef": "typedef", + "description": " Checkout behavior flags", + "comments": "

In libgit2, checkout is used to update the working directory and index to match a target tree. Unlike git checkout, it does not move the HEAD commit for you - use git_repository_set_head or the like to do that.

\n\n

Checkout looks at (up to) four things: the "target" tree you want to check out, the "baseline" tree of what was checked out previously, the working directory for actual files, and the index for staged changes.

\n\n

You give checkout one of three strategies for update:

\n\n
    \n
  • GIT_CHECKOUT_NONE is a dry-run strategy that checks for conflicts, etc., but doesn't make any actual changes.

  • \n
  • GIT_CHECKOUT_FORCE is at the opposite extreme, taking any action to make the working directory match the target (including potentially discarding modified files).

  • \n
  • GIT_CHECKOUT_SAFE is between these two options, it will only make modifications that will not lose changes.

    \n\n
                     |  target == baseline   |  target != baseline  |    ---------------------|-----------------------|----------------------|     workdir == baseline |       no action       |  create, update, or  |                         |                       |     delete file      |    ---------------------|-----------------------|----------------------|     workdir exists and  |       no action       |   conflict (notify   |       is != baseline    | notify dirty MODIFIED | and cancel checkout) |    ---------------------|-----------------------|----------------------|      workdir missing,   | notify dirty DELETED  |     create file      |      baseline present   |                       |                      |    ---------------------|-----------------------|----------------------|\n
  • \n
\n\n

To emulate git checkout, use GIT_CHECKOUT_SAFE with a checkout notification callback (see below) that displays information about dirty files. The default behavior will cancel checkout on conflicts.

\n\n

To emulate git checkout-index, use GIT_CHECKOUT_SAFE with a notification callback that cancels the operation if a dirty-but-existing file is found in the working directory. This core git command isn't quite "force" but is sensitive about some types of changes.

\n\n

To emulate git checkout -f, use GIT_CHECKOUT_FORCE.

\n\n

There are some additional flags to modified the behavior of checkout:

\n\n
    \n
  • GIT_CHECKOUT_ALLOW_CONFLICTS makes SAFE mode apply safe file updates even if there are conflicts (instead of cancelling the checkout).

  • \n
  • GIT_CHECKOUT_REMOVE_UNTRACKED means remove untracked files (i.e. not in target, baseline, or index, and not ignored) from the working dir.

  • \n
  • GIT_CHECKOUT_REMOVE_IGNORED means remove ignored files (that are also untracked) from the working directory as well.

  • \n
  • GIT_CHECKOUT_UPDATE_ONLY means to only update the content of files that already exist. Files will not be created nor deleted. This just skips applying adds, deletes, and typechanges.

  • \n
  • GIT_CHECKOUT_DONT_UPDATE_INDEX prevents checkout from writing the updated files' information to the index.

  • \n
  • Normally, checkout will reload the index and git attributes from disk before any operations. GIT_CHECKOUT_NO_REFRESH prevents this reload.

  • \n
  • Unmerged index entries are conflicts. GIT_CHECKOUT_SKIP_UNMERGED skips files with unmerged index entries instead. GIT_CHECKOUT_USE_OURS and GIT_CHECKOUT_USE_THEIRS to proceed with the checkout using either the stage 2 ("ours") or stage 3 ("theirs") version of files in the index.

  • \n
  • GIT_CHECKOUT_DONT_OVERWRITE_IGNORED prevents ignored files from being overwritten. Normally, files that are ignored in the working directory are not considered "precious" and may be overwritten if the checkout target contains that file.

  • \n
  • GIT_CHECKOUT_DONT_REMOVE_EXISTING prevents checkout from removing files or folders that fold to the same name on case insensitive filesystems. This can cause files to retain their existing names and write through existing symbolic links.

  • \n
\n", + "fields": [ + { + "type": "int", + "name": "GIT_CHECKOUT_NONE", + "comments": "

default is a dry run, no actual updates

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_SAFE", + "comments": "

Allow safe updates that cannot overwrite uncommitted data

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_FORCE", + "comments": "

Allow all updates to force working directory to look like index

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_RECREATE_MISSING", + "comments": "

Allow checkout to recreate missing files

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_ALLOW_CONFLICTS", + "comments": "

Allow checkout to make safe updates even if conflicts are found

\n", + "value": 16 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_REMOVE_UNTRACKED", + "comments": "

Remove untracked files not in index (that are not ignored)

\n", + "value": 32 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_REMOVE_IGNORED", + "comments": "

Remove ignored files not in index

\n", + "value": 64 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_UPDATE_ONLY", + "comments": "

Only update existing files, don't create new ones

\n", + "value": 128 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_DONT_UPDATE_INDEX", + "comments": "

Normally checkout updates index entries as it goes; this stops that.\n Implies GIT_CHECKOUT_DONT_WRITE_INDEX.

\n", + "value": 256 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_NO_REFRESH", + "comments": "

Don't refresh index/config/etc before doing checkout

\n", + "value": 512 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_SKIP_UNMERGED", + "comments": "

Allow checkout to skip unmerged files

\n", + "value": 1024 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_USE_OURS", + "comments": "

For unmerged files, checkout stage 2 from index

\n", + "value": 2048 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_USE_THEIRS", + "comments": "

For unmerged files, checkout stage 3 from index

\n", + "value": 4096 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_DISABLE_PATHSPEC_MATCH", + "comments": "

Treat pathspec as simple list of exact match file paths

\n", + "value": 8192 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_SKIP_LOCKED_DIRECTORIES", + "comments": "

Ignore directories in use, they will be left empty

\n", + "value": 262144 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_DONT_OVERWRITE_IGNORED", + "comments": "

Don't overwrite ignored files that exist in the checkout target

\n", + "value": 524288 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_CONFLICT_STYLE_MERGE", + "comments": "

Write normal merge files for conflicts

\n", + "value": 1048576 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_CONFLICT_STYLE_DIFF3", + "comments": "

Include common ancestor data in diff3 format files for conflicts

\n", + "value": 2097152 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_DONT_REMOVE_EXISTING", + "comments": "

Don't overwrite existing files or folders

\n", + "value": 4194304 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_DONT_WRITE_INDEX", + "comments": "

Normally checkout writes the index upon completion; this prevents that.

\n", + "value": 8388608 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_UPDATE_SUBMODULES", + "comments": "

Recursively checkout submodules with same options (NOT IMPLEMENTED)

\n", + "value": 65536 + }, + { + "type": "int", + "name": "GIT_CHECKOUT_UPDATE_SUBMODULES_IF_CHANGED", + "comments": "

Recursively checkout submodules if HEAD moved in super repo (NOT IMPLEMENTED)

\n", + "value": 131072 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_cherrypick_options", + { + "decl": [ + "unsigned int version", + "unsigned int mainline", + "git_merge_options merge_opts", + "git_checkout_options checkout_opts" + ], + "type": "struct", + "value": "git_cherrypick_options", + "file": "cherrypick.h", + "line": 26, + "lineto": 34, + "block": "unsigned int version\nunsigned int mainline\ngit_merge_options merge_opts\ngit_checkout_options checkout_opts", + "tdef": "typedef", + "description": " Cherry-pick options", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "unsigned int", + "name": "mainline", + "comments": " For merge commits, the \"mainline\" is treated as the parent. " + }, + { + "type": "git_merge_options", + "name": "merge_opts", + "comments": " Options for the merging " + }, + { + "type": "git_checkout_options", + "name": "checkout_opts", + "comments": " Options for the checkout " + } + ], + "used": { + "returns": [], + "needs": [ + "git_cherrypick", + "git_cherrypick_init_options" + ] + } + } + ], + [ + "git_clone_local_t", + { + "decl": [ + "GIT_CLONE_LOCAL_AUTO", + "GIT_CLONE_LOCAL", + "GIT_CLONE_NO_LOCAL", + "GIT_CLONE_LOCAL_NO_LINKS" + ], + "type": "enum", + "file": "clone.h", + "line": 33, + "lineto": 53, + "block": "GIT_CLONE_LOCAL_AUTO\nGIT_CLONE_LOCAL\nGIT_CLONE_NO_LOCAL\nGIT_CLONE_LOCAL_NO_LINKS", + "tdef": "typedef", + "description": " Options for bypassing the git-aware transport on clone. Bypassing\n it means that instead of a fetch, libgit2 will copy the object\n database directory instead of figuring out what it needs, which is\n faster. If possible, it will hardlink the files to save space.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_CLONE_LOCAL_AUTO", + "comments": "

Auto-detect (default), libgit2 will bypass the git-aware\n transport for local paths, but use a normal fetch for\n file:// urls.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_CLONE_LOCAL", + "comments": "

Bypass the git-aware transport even for a file:// url.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_CLONE_NO_LOCAL", + "comments": "

Do no bypass the git-aware transport

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_CLONE_LOCAL_NO_LINKS", + "comments": "

Bypass the git-aware transport, but do not try to use\n hardlinks.

\n", + "value": 3 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_clone_options", + { + "decl": [ + "unsigned int version", + "git_checkout_options checkout_opts", + "git_fetch_options fetch_opts", + "int bare", + "git_clone_local_t local", + "const char * checkout_branch", + "git_repository_create_cb repository_cb", + "void * repository_cb_payload", + "git_remote_create_cb remote_cb", + "void * remote_cb_payload" + ], + "type": "struct", + "value": "git_clone_options", + "file": "clone.h", + "line": 103, + "lineto": 164, + "block": "unsigned int version\ngit_checkout_options checkout_opts\ngit_fetch_options fetch_opts\nint bare\ngit_clone_local_t local\nconst char * checkout_branch\ngit_repository_create_cb repository_cb\nvoid * repository_cb_payload\ngit_remote_create_cb remote_cb\nvoid * remote_cb_payload", + "tdef": "typedef", + "description": " Clone options structure", + "comments": "

Use the GIT_CLONE_OPTIONS_INIT to get the default settings, like this:

\n\n
    git_clone_options opts = GIT_CLONE_OPTIONS_INIT;\n
\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "git_checkout_options", + "name": "checkout_opts", + "comments": " These options are passed to the checkout step. To disable\n checkout, set the `checkout_strategy` to\n `GIT_CHECKOUT_NONE`." + }, + { + "type": "git_fetch_options", + "name": "fetch_opts", + "comments": " Options which control the fetch, including callbacks.\n\n The callbacks are used for reporting fetch progress, and for acquiring\n credentials in the event they are needed." + }, + { + "type": "int", + "name": "bare", + "comments": " Set to zero (false) to create a standard repo, or non-zero\n for a bare repo" + }, + { + "type": "git_clone_local_t", + "name": "local", + "comments": " Whether to use a fetch or copy the object database." + }, + { + "type": "const char *", + "name": "checkout_branch", + "comments": " The name of the branch to checkout. NULL means use the\n remote's default branch." + }, + { + "type": "git_repository_create_cb", + "name": "repository_cb", + "comments": " A callback used to create the new repository into which to\n clone. If NULL, the 'bare' field will be used to determine\n whether to create a bare repository." + }, + { + "type": "void *", + "name": "repository_cb_payload", + "comments": " An opaque payload to pass to the git_repository creation callback.\n This parameter is ignored unless repository_cb is non-NULL." + }, + { + "type": "git_remote_create_cb", + "name": "remote_cb", + "comments": " A callback used to create the git_remote, prior to its being\n used to perform the clone operation. See the documentation for\n git_remote_create_cb for details. This parameter may be NULL,\n indicating that git_clone should provide default behavior." + }, + { + "type": "void *", + "name": "remote_cb_payload", + "comments": " An opaque payload to pass to the git_remote creation callback.\n This parameter is ignored unless remote_cb is non-NULL." 
+ } + ], + "used": { + "returns": [], + "needs": [ + "git_clone", + "git_clone_init_options" + ] + } + } + ], + [ + "git_commit", + { + "decl": "git_commit", + "type": "struct", + "value": "git_commit", + "file": "types.h", + "line": 120, + "lineto": 120, + "tdef": "typedef", + "description": " Parsed representation of a commit object. ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_branch_create", + "git_cherrypick", + "git_cherrypick_commit", + "git_commit_amend", + "git_commit_author", + "git_commit_body", + "git_commit_committer", + "git_commit_create", + "git_commit_create_buffer", + "git_commit_create_from_callback", + "git_commit_dup", + "git_commit_free", + "git_commit_header_field", + "git_commit_id", + "git_commit_lookup", + "git_commit_lookup_prefix", + "git_commit_message", + "git_commit_message_encoding", + "git_commit_message_raw", + "git_commit_nth_gen_ancestor", + "git_commit_owner", + "git_commit_parent", + "git_commit_parent_id", + "git_commit_parentcount", + "git_commit_raw_header", + "git_commit_summary", + "git_commit_time", + "git_commit_time_offset", + "git_commit_tree", + "git_commit_tree_id", + "git_diff_commit_as_email", + "git_merge_commits", + "git_revert", + "git_revert_commit" + ] + } + } + ], + [ + "git_config", + { + "decl": "git_config", + "type": "struct", + "value": "git_config", + "file": "types.h", + "line": 138, + "lineto": 138, + "tdef": "typedef", + "description": " Memory representation of a set of config files ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_config_add_backend", + "git_config_add_file_ondisk", + "git_config_backend_foreach_match", + "git_config_delete_entry", + "git_config_delete_multivar", + "git_config_entry_free", + "git_config_foreach", + "git_config_foreach_match", + "git_config_free", + "git_config_get_bool", + "git_config_get_entry", + "git_config_get_int32", + "git_config_get_int64", + "git_config_get_mapped", + "git_config_get_multivar_foreach", + 
"git_config_get_path", + "git_config_get_string", + "git_config_get_string_buf", + "git_config_init_backend", + "git_config_iterator_free", + "git_config_iterator_glob_new", + "git_config_iterator_new", + "git_config_lock", + "git_config_multivar_iterator_new", + "git_config_new", + "git_config_next", + "git_config_open_default", + "git_config_open_global", + "git_config_open_level", + "git_config_open_ondisk", + "git_config_set_bool", + "git_config_set_int32", + "git_config_set_int64", + "git_config_set_multivar", + "git_config_set_string", + "git_config_snapshot", + "git_repository_config", + "git_repository_config_snapshot", + "git_repository_set_config" + ] + } + } + ], + [ + "git_config_backend", + { + "decl": "git_config_backend", + "type": "struct", + "value": "git_config_backend", + "file": "types.h", + "line": 141, + "lineto": 141, + "block": "unsigned int version\nint readonly\nstruct git_config * cfg\nint (*)(struct git_config_backend *, git_config_level_t) open\nint (*)(struct git_config_backend *, const char *, git_config_entry **) get\nint (*)(struct git_config_backend *, const char *, const char *) set\nint (*)(git_config_backend *, const char *, const char *, const char *) set_multivar\nint (*)(struct git_config_backend *, const char *) del\nint (*)(struct git_config_backend *, const char *, const char *) del_multivar\nint (*)(git_config_iterator **, struct git_config_backend *) iterator\nint (*)(struct git_config_backend **, struct git_config_backend *) snapshot\nint (*)(struct git_config_backend *) lock\nint (*)(struct git_config_backend *, int) unlock\nvoid (*)(struct git_config_backend *) free", + "tdef": "typedef", + "description": " Interface to access a configuration file ", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "int", + "name": "readonly", + "comments": " True if this backend is for a snapshot " + }, + { + "type": "struct git_config *", + "name": "cfg", + 
"comments": "" + }, + { + "type": "int (*)(struct git_config_backend *, git_config_level_t)", + "name": "open", + "comments": "" + }, + { + "type": "int (*)(struct git_config_backend *, const char *, git_config_entry **)", + "name": "get", + "comments": "" + }, + { + "type": "int (*)(struct git_config_backend *, const char *, const char *)", + "name": "set", + "comments": "" + }, + { + "type": "int (*)(git_config_backend *, const char *, const char *, const char *)", + "name": "set_multivar", + "comments": "" + }, + { + "type": "int (*)(struct git_config_backend *, const char *)", + "name": "del", + "comments": "" + }, + { + "type": "int (*)(struct git_config_backend *, const char *, const char *)", + "name": "del_multivar", + "comments": "" + }, + { + "type": "int (*)(git_config_iterator **, struct git_config_backend *)", + "name": "iterator", + "comments": "" + }, + { + "type": "int (*)(struct git_config_backend **, struct git_config_backend *)", + "name": "snapshot", + "comments": " Produce a read-only version of this backend " + }, + { + "type": "int (*)(struct git_config_backend *)", + "name": "lock", + "comments": " Lock this backend.\n\n Prevent any writes to the data store backing this\n backend. Any updates must not be visible to any other\n readers." + }, + { + "type": "int (*)(struct git_config_backend *, int)", + "name": "unlock", + "comments": " Unlock the data store backing this backend. If success is\n true, the changes should be committed, otherwise rolled\n back." 
+ }, + { + "type": "void (*)(struct git_config_backend *)", + "name": "free", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_config_add_backend", + "git_config_backend_foreach_match", + "git_config_init_backend" + ] + } + } + ], + [ + "git_config_entry", + { + "decl": [ + "const char * name", + "const char * value", + "git_config_level_t level", + "void (*)(struct git_config_entry *) free", + "void * payload" + ], + "type": "struct", + "value": "git_config_entry", + "file": "config.h", + "line": 64, + "lineto": 70, + "block": "const char * name\nconst char * value\ngit_config_level_t level\nvoid (*)(struct git_config_entry *) free\nvoid * payload", + "tdef": "typedef", + "description": " An entry in a configuration file", + "comments": "", + "fields": [ + { + "type": "const char *", + "name": "name", + "comments": " Name of the entry (normalised) " + }, + { + "type": "const char *", + "name": "value", + "comments": " String value of the entry " + }, + { + "type": "git_config_level_t", + "name": "level", + "comments": " Which config file this was found in " + }, + { + "type": "void (*)(struct git_config_entry *)", + "name": "free", + "comments": " Free function for this entry " + }, + { + "type": "void *", + "name": "payload", + "comments": " Opaque value for the free function. 
Do not read or write " + } + ], + "used": { + "returns": [], + "needs": [ + "git_config_entry_free", + "git_config_get_entry", + "git_config_next" + ] + } + } + ], + [ + "git_config_iterator", + { + "decl": [ + "git_config_backend * backend", + "unsigned int flags", + "int (*)(git_config_entry **, git_config_iterator *) next", + "void (*)(git_config_iterator *) free" + ], + "type": "struct", + "value": "git_config_iterator", + "file": "sys/config.h", + "line": 34, + "lineto": 48, + "block": "git_config_backend * backend\nunsigned int flags\nint (*)(git_config_entry **, git_config_iterator *) next\nvoid (*)(git_config_iterator *) free", + "tdef": null, + "description": " Every iterator must have this struct as its first element, so the\n API can talk to it. You'd define your iterator as", + "comments": "
 struct my_iterator {             git_config_iterator parent;             ...     }\n
\n\n

and assign iter->parent.backend to your git_config_backend.

\n", + "fields": [ + { + "type": "git_config_backend *", + "name": "backend", + "comments": "" + }, + { + "type": "unsigned int", + "name": "flags", + "comments": "" + }, + { + "type": "int (*)(git_config_entry **, git_config_iterator *)", + "name": "next", + "comments": " Return the current entry and advance the iterator. The\n memory belongs to the library." + }, + { + "type": "void (*)(git_config_iterator *)", + "name": "free", + "comments": " Free the iterator" + } + ], + "used": { + "returns": [], + "needs": [ + "git_config_iterator_free", + "git_config_iterator_glob_new", + "git_config_iterator_new", + "git_config_multivar_iterator_new", + "git_config_next" + ] + } + } + ], + [ + "git_config_level_t", + { + "decl": [ + "GIT_CONFIG_LEVEL_PROGRAMDATA", + "GIT_CONFIG_LEVEL_SYSTEM", + "GIT_CONFIG_LEVEL_XDG", + "GIT_CONFIG_LEVEL_GLOBAL", + "GIT_CONFIG_LEVEL_LOCAL", + "GIT_CONFIG_LEVEL_APP", + "GIT_CONFIG_HIGHEST_LEVEL" + ], + "type": "enum", + "file": "config.h", + "line": 31, + "lineto": 59, + "block": "GIT_CONFIG_LEVEL_PROGRAMDATA\nGIT_CONFIG_LEVEL_SYSTEM\nGIT_CONFIG_LEVEL_XDG\nGIT_CONFIG_LEVEL_GLOBAL\nGIT_CONFIG_LEVEL_LOCAL\nGIT_CONFIG_LEVEL_APP\nGIT_CONFIG_HIGHEST_LEVEL", + "tdef": "typedef", + "description": " Priority level of a config file.\n These priority levels correspond to the natural escalation logic\n (from higher to lower) when searching for config entries in git.git.", + "comments": "

git_config_open_default() and git_repository_config() honor those priority levels as well.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_CONFIG_LEVEL_PROGRAMDATA", + "comments": "

System-wide on Windows, for compatibility with portable git

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_CONFIG_LEVEL_SYSTEM", + "comments": "

System-wide configuration file; /etc/gitconfig on Linux systems

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_CONFIG_LEVEL_XDG", + "comments": "

XDG compatible configuration file; typically ~/.config/git/config

\n", + "value": 3 + }, + { + "type": "int", + "name": "GIT_CONFIG_LEVEL_GLOBAL", + "comments": "

User-specific configuration file (also called Global configuration\n file); typically ~/.gitconfig

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_CONFIG_LEVEL_LOCAL", + "comments": "

Repository specific configuration file; $WORK_DIR/.git/config on\n non-bare repos

\n", + "value": 5 + }, + { + "type": "int", + "name": "GIT_CONFIG_LEVEL_APP", + "comments": "

Application specific configuration file; freely defined by applications

\n", + "value": 6 + }, + { + "type": "int", + "name": "GIT_CONFIG_HIGHEST_LEVEL", + "comments": "

Represents the highest level available config file (i.e. the most\n specific config file available that actually is loaded)

\n", + "value": -1 + } + ], + "used": { + "returns": [], + "needs": [ + "git_config_add_backend", + "git_config_add_file_ondisk", + "git_config_open_level" + ] + } + } + ], + [ + "git_cred_default", + { + "decl": "git_cred_default", + "type": "struct", + "value": "git_cred_default", + "file": "transport.h", + "line": 176, + "lineto": 176, + "tdef": "typedef", + "description": " A key for NTLM/Kerberos \"default\" credentials ", + "comments": "", + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_cred_ssh_custom", + { + "decl": [ + "git_cred parent", + "char * username", + "char * publickey", + "size_t publickey_len", + "git_cred_sign_callback sign_callback", + "void * payload" + ], + "type": "struct", + "value": "git_cred_ssh_custom", + "file": "transport.h", + "line": 166, + "lineto": 173, + "block": "git_cred parent\nchar * username\nchar * publickey\nsize_t publickey_len\ngit_cred_sign_callback sign_callback\nvoid * payload", + "tdef": "typedef", + "description": " A key with a custom signature function", + "comments": "", + "fields": [ + { + "type": "git_cred", + "name": "parent", + "comments": "" + }, + { + "type": "char *", + "name": "username", + "comments": "" + }, + { + "type": "char *", + "name": "publickey", + "comments": "" + }, + { + "type": "size_t", + "name": "publickey_len", + "comments": "" + }, + { + "type": "git_cred_sign_callback", + "name": "sign_callback", + "comments": "" + }, + { + "type": "void *", + "name": "payload", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_cred_ssh_interactive", + { + "decl": [ + "git_cred parent", + "char * username", + "git_cred_ssh_interactive_callback prompt_callback", + "void * payload" + ], + "type": "struct", + "value": "git_cred_ssh_interactive", + "file": "transport.h", + "line": 156, + "lineto": 161, + "block": "git_cred parent\nchar * username\ngit_cred_ssh_interactive_callback prompt_callback\nvoid * payload", + "tdef": "typedef", + 
"description": " Keyboard-interactive based ssh authentication", + "comments": "", + "fields": [ + { + "type": "git_cred", + "name": "parent", + "comments": "" + }, + { + "type": "char *", + "name": "username", + "comments": "" + }, + { + "type": "git_cred_ssh_interactive_callback", + "name": "prompt_callback", + "comments": "" + }, + { + "type": "void *", + "name": "payload", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_cred_ssh_interactive_new" + ] + } + } + ], + [ + "git_cred_ssh_key", + { + "decl": [ + "git_cred parent", + "char * username", + "char * publickey", + "char * privatekey", + "char * passphrase" + ], + "type": "struct", + "value": "git_cred_ssh_key", + "file": "transport.h", + "line": 145, + "lineto": 151, + "block": "git_cred parent\nchar * username\nchar * publickey\nchar * privatekey\nchar * passphrase", + "tdef": "typedef", + "description": " A ssh key from disk", + "comments": "", + "fields": [ + { + "type": "git_cred", + "name": "parent", + "comments": "" + }, + { + "type": "char *", + "name": "username", + "comments": "" + }, + { + "type": "char *", + "name": "publickey", + "comments": "" + }, + { + "type": "char *", + "name": "privatekey", + "comments": "" + }, + { + "type": "char *", + "name": "passphrase", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_cred_username", + { + "decl": [ + "git_cred parent", + "char [1] username" + ], + "type": "struct", + "value": "git_cred_username", + "file": "transport.h", + "line": 179, + "lineto": 182, + "block": "git_cred parent\nchar [1] username", + "tdef": "typedef", + "description": " Username-only credential information ", + "comments": "", + "fields": [ + { + "type": "git_cred", + "name": "parent", + "comments": "" + }, + { + "type": "char [1]", + "name": "username", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_cred_userpass_payload", + { + "decl": [ + "const char * 
username", + "const char * password" + ], + "type": "struct", + "value": "git_cred_userpass_payload", + "file": "cred_helpers.h", + "line": 24, + "lineto": 27, + "block": "const char * username\nconst char * password", + "tdef": "typedef", + "description": " Payload for git_cred_stock_userpass_plaintext.", + "comments": "", + "fields": [ + { + "type": "const char *", + "name": "username", + "comments": "" + }, + { + "type": "const char *", + "name": "password", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_cred_userpass_plaintext", + { + "decl": [ + "git_cred parent", + "char * username", + "char * password" + ], + "type": "struct", + "value": "git_cred_userpass_plaintext", + "file": "transport.h", + "line": 122, + "lineto": 126, + "block": "git_cred parent\nchar * username\nchar * password", + "tdef": "typedef", + "description": " A plaintext username and password ", + "comments": "", + "fields": [ + { + "type": "git_cred", + "name": "parent", + "comments": "" + }, + { + "type": "char *", + "name": "username", + "comments": "" + }, + { + "type": "char *", + "name": "password", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_credtype_t", + { + "decl": [ + "GIT_CREDTYPE_USERPASS_PLAINTEXT", + "GIT_CREDTYPE_SSH_KEY", + "GIT_CREDTYPE_SSH_CUSTOM", + "GIT_CREDTYPE_DEFAULT", + "GIT_CREDTYPE_SSH_INTERACTIVE", + "GIT_CREDTYPE_USERNAME", + "GIT_CREDTYPE_SSH_MEMORY" + ], + "type": "enum", + "file": "transport.h", + "line": 81, + "lineto": 111, + "block": "GIT_CREDTYPE_USERPASS_PLAINTEXT\nGIT_CREDTYPE_SSH_KEY\nGIT_CREDTYPE_SSH_CUSTOM\nGIT_CREDTYPE_DEFAULT\nGIT_CREDTYPE_SSH_INTERACTIVE\nGIT_CREDTYPE_USERNAME\nGIT_CREDTYPE_SSH_MEMORY", + "tdef": "typedef", + "description": " Authentication type requested ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_CREDTYPE_USERPASS_PLAINTEXT", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": 
"GIT_CREDTYPE_SSH_KEY", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_CREDTYPE_SSH_CUSTOM", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_CREDTYPE_DEFAULT", + "comments": "", + "value": 8 + }, + { + "type": "int", + "name": "GIT_CREDTYPE_SSH_INTERACTIVE", + "comments": "", + "value": 16 + }, + { + "type": "int", + "name": "GIT_CREDTYPE_USERNAME", + "comments": "

Username-only information

\n\n

If the SSH transport does not know which username to use,\n it will ask via this credential type.

\n", + "value": 32 + }, + { + "type": "int", + "name": "GIT_CREDTYPE_SSH_MEMORY", + "comments": "

Credentials read from memory.

\n\n

Only available for libssh2+OpenSSL for now.

\n", + "value": 64 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_cvar_map", + { + "decl": [ + "git_cvar_t cvar_type", + "const char * str_match", + "int map_value" + ], + "type": "struct", + "value": "git_cvar_map", + "file": "config.h", + "line": 93, + "lineto": 97, + "block": "git_cvar_t cvar_type\nconst char * str_match\nint map_value", + "tdef": "typedef", + "description": " Mapping from config variables to values.", + "comments": "", + "fields": [ + { + "type": "git_cvar_t", + "name": "cvar_type", + "comments": "" + }, + { + "type": "const char *", + "name": "str_match", + "comments": "" + }, + { + "type": "int", + "name": "map_value", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_config_get_mapped", + "git_config_lookup_map_value" + ] + } + } + ], + [ + "git_cvar_t", + { + "decl": [ + "GIT_CVAR_FALSE", + "GIT_CVAR_TRUE", + "GIT_CVAR_INT32", + "GIT_CVAR_STRING" + ], + "type": "enum", + "file": "config.h", + "line": 83, + "lineto": 88, + "block": "GIT_CVAR_FALSE\nGIT_CVAR_TRUE\nGIT_CVAR_INT32\nGIT_CVAR_STRING", + "tdef": "typedef", + "description": " Config var type", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_CVAR_FALSE", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_CVAR_TRUE", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_CVAR_INT32", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_CVAR_STRING", + "comments": "", + "value": 3 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_delta_t", + { + "decl": [ + "GIT_DELTA_UNMODIFIED", + "GIT_DELTA_ADDED", + "GIT_DELTA_DELETED", + "GIT_DELTA_MODIFIED", + "GIT_DELTA_RENAMED", + "GIT_DELTA_COPIED", + "GIT_DELTA_IGNORED", + "GIT_DELTA_UNTRACKED", + "GIT_DELTA_TYPECHANGE", + "GIT_DELTA_UNREADABLE", + "GIT_DELTA_CONFLICTED" + ], + "type": "enum", + "file": "diff.h", + "line": 246, + "lineto": 258, + "block": 
"GIT_DELTA_UNMODIFIED\nGIT_DELTA_ADDED\nGIT_DELTA_DELETED\nGIT_DELTA_MODIFIED\nGIT_DELTA_RENAMED\nGIT_DELTA_COPIED\nGIT_DELTA_IGNORED\nGIT_DELTA_UNTRACKED\nGIT_DELTA_TYPECHANGE\nGIT_DELTA_UNREADABLE\nGIT_DELTA_CONFLICTED", + "tdef": "typedef", + "description": " What type of change is described by a git_diff_delta?", + "comments": "

GIT_DELTA_RENAMED and GIT_DELTA_COPIED will only show up if you run git_diff_find_similar() on the diff object.

\n\n

GIT_DELTA_TYPECHANGE only shows up given GIT_DIFF_INCLUDE_TYPECHANGE in the option flags (otherwise type changes will be split into ADDED / DELETED pairs).

\n", + "fields": [ + { + "type": "int", + "name": "GIT_DELTA_UNMODIFIED", + "comments": "

no changes

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_DELTA_ADDED", + "comments": "

entry does not exist in old version

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_DELTA_DELETED", + "comments": "

entry does not exist in new version

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_DELTA_MODIFIED", + "comments": "

entry content changed between old and new

\n", + "value": 3 + }, + { + "type": "int", + "name": "GIT_DELTA_RENAMED", + "comments": "

entry was renamed between old and new

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_DELTA_COPIED", + "comments": "

entry was copied from another old entry

\n", + "value": 5 + }, + { + "type": "int", + "name": "GIT_DELTA_IGNORED", + "comments": "

entry is ignored item in workdir

\n", + "value": 6 + }, + { + "type": "int", + "name": "GIT_DELTA_UNTRACKED", + "comments": "

entry is untracked item in workdir

\n", + "value": 7 + }, + { + "type": "int", + "name": "GIT_DELTA_TYPECHANGE", + "comments": "

type of entry changed between old and new

\n", + "value": 8 + }, + { + "type": "int", + "name": "GIT_DELTA_UNREADABLE", + "comments": "

entry is unreadable

\n", + "value": 9 + }, + { + "type": "int", + "name": "GIT_DELTA_CONFLICTED", + "comments": "

entry in the index is conflicted

\n", + "value": 10 + } + ], + "used": { + "returns": [], + "needs": [ + "git_diff_num_deltas_of_type", + "git_diff_status_char" + ] + } + } + ], + [ + "git_describe_format_options", + { + "decl": [ + "unsigned int version", + "unsigned int abbreviated_size", + "int always_use_long_format", + "const char * dirty_suffix" + ], + "type": "struct", + "value": "git_describe_format_options", + "file": "describe.h", + "line": 78, + "lineto": 98, + "block": "unsigned int version\nunsigned int abbreviated_size\nint always_use_long_format\nconst char * dirty_suffix", + "tdef": "typedef", + "description": " Options for formatting the describe string", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "unsigned int", + "name": "abbreviated_size", + "comments": " Size of the abbreviated commit id to use. This value is the\n lower bound for the length of the abbreviated string. The\n default is 7." + }, + { + "type": "int", + "name": "always_use_long_format", + "comments": " Set to use the long format even when a shorter name could be used." + }, + { + "type": "const char *", + "name": "dirty_suffix", + "comments": " If the workdir is dirty and this is set, this string will\n be appended to the description string." 
+ } + ], + "used": { + "returns": [], + "needs": [ + "git_describe_format" + ] + } + } + ], + [ + "git_describe_options", + { + "decl": [ + "unsigned int version", + "unsigned int max_candidates_tags", + "unsigned int describe_strategy", + "const char * pattern", + "int only_follow_first_parent", + "int show_commit_oid_as_fallback" + ], + "type": "struct", + "value": "git_describe_options", + "file": "describe.h", + "line": 44, + "lineto": 62, + "block": "unsigned int version\nunsigned int max_candidates_tags\nunsigned int describe_strategy\nconst char * pattern\nint only_follow_first_parent\nint show_commit_oid_as_fallback", + "tdef": "typedef", + "description": " Describe options structure", + "comments": "

Initialize with GIT_DESCRIBE_OPTIONS_INIT macro to correctly set the version field. E.g.

\n\n
    git_describe_options opts = GIT_DESCRIBE_OPTIONS_INIT;\n
\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "unsigned int", + "name": "max_candidates_tags", + "comments": "" + }, + { + "type": "unsigned int", + "name": "describe_strategy", + "comments": " default: 10 " + }, + { + "type": "const char *", + "name": "pattern", + "comments": " default: GIT_DESCRIBE_DEFAULT " + }, + { + "type": "int", + "name": "only_follow_first_parent", + "comments": " When calculating the distance from the matching tag or\n reference, only walk down the first-parent ancestry." + }, + { + "type": "int", + "name": "show_commit_oid_as_fallback", + "comments": " If no matching tag or reference is found, the describe\n operation would normally fail. If this option is set, it\n will instead fall back to showing the full id of the\n commit." + } + ], + "used": { + "returns": [], + "needs": [ + "git_describe_commit", + "git_describe_workdir" + ] + } + } + ], + [ + "git_describe_strategy_t", + { + "decl": [ + "GIT_DESCRIBE_DEFAULT", + "GIT_DESCRIBE_TAGS", + "GIT_DESCRIBE_ALL" + ], + "type": "enum", + "file": "describe.h", + "line": 30, + "lineto": 34, + "block": "GIT_DESCRIBE_DEFAULT\nGIT_DESCRIBE_TAGS\nGIT_DESCRIBE_ALL", + "tdef": "typedef", + "description": " Reference lookup strategy", + "comments": "

These behave like the --tags and --all optios to git-describe, namely they say to look for any reference in either refs/tags/ or refs/ respectively.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_DESCRIBE_DEFAULT", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_DESCRIBE_TAGS", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_DESCRIBE_ALL", + "comments": "", + "value": 2 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_diff", + { + "decl": "git_diff", + "type": "struct", + "value": "git_diff", + "file": "diff.h", + "line": 219, + "lineto": 219, + "tdef": "typedef", + "description": " The diff object that contains all individual file deltas.", + "comments": "

This is an opaque structure which will be allocated by one of the diff generator functions below (such as git_diff_tree_to_tree). You are responsible for releasing the object memory when done, using the git_diff_free() function.

\n", + "used": { + "returns": [ + "git_diff_get_delta", + "git_patch_get_delta", + "git_pathspec_match_list_diff_entry" + ], + "needs": [ + "git_checkout_notify_cb", + "git_diff_binary_cb", + "git_diff_blob_to_buffer", + "git_diff_blobs", + "git_diff_buffers", + "git_diff_commit_as_email", + "git_diff_file_cb", + "git_diff_find_init_options", + "git_diff_find_similar", + "git_diff_foreach", + "git_diff_format_email", + "git_diff_format_email_init_options", + "git_diff_free", + "git_diff_get_delta", + "git_diff_get_perfdata", + "git_diff_get_stats", + "git_diff_hunk_cb", + "git_diff_index_to_index", + "git_diff_index_to_workdir", + "git_diff_init_options", + "git_diff_is_sorted_icase", + "git_diff_line_cb", + "git_diff_merge", + "git_diff_notify_cb", + "git_diff_num_deltas", + "git_diff_num_deltas_of_type", + "git_diff_print", + "git_diff_print_callback__to_buf", + "git_diff_print_callback__to_file_handle", + "git_diff_progress_cb", + "git_diff_stats_deletions", + "git_diff_stats_files_changed", + "git_diff_stats_free", + "git_diff_stats_insertions", + "git_diff_stats_to_buf", + "git_diff_tree_to_index", + "git_diff_tree_to_tree", + "git_diff_tree_to_workdir", + "git_diff_tree_to_workdir_with_index", + "git_patch_from_blob_and_buffer", + "git_patch_from_blobs", + "git_patch_from_buffers", + "git_patch_from_diff", + "git_patch_get_hunk", + "git_patch_get_line_in_hunk", + "git_patch_print", + "git_pathspec_match_diff", + "git_status_list_get_perfdata" + ] + } + } + ], + [ + "git_diff_binary", + { + "decl": [ + "git_diff_binary_file old_file", + "git_diff_binary_file new_file" + ], + "type": "struct", + "value": "git_diff_binary", + "file": "diff.h", + "line": 484, + "lineto": 487, + "block": "git_diff_binary_file old_file\ngit_diff_binary_file new_file", + "tdef": "typedef", + "description": " Structure describing the binary contents of a diff. 
", + "comments": "", + "fields": [ + { + "type": "git_diff_binary_file", + "name": "old_file", + "comments": " The contents of the old file. " + }, + { + "type": "git_diff_binary_file", + "name": "new_file", + "comments": " The contents of the new file. " + } + ], + "used": { + "returns": [], + "needs": [ + "git_diff_binary_cb", + "git_diff_blob_to_buffer", + "git_diff_blobs", + "git_diff_buffers", + "git_diff_foreach" + ] + } + } + ], + [ + "git_diff_binary_file", + { + "decl": [ + "git_diff_binary_t type", + "const char * data", + "size_t datalen", + "size_t inflatedlen" + ], + "type": "struct", + "value": "git_diff_binary_file", + "file": "diff.h", + "line": 469, + "lineto": 481, + "block": "git_diff_binary_t type\nconst char * data\nsize_t datalen\nsize_t inflatedlen", + "tdef": "typedef", + "description": " The contents of one of the files in a binary diff. ", + "comments": "", + "fields": [ + { + "type": "git_diff_binary_t", + "name": "type", + "comments": " The type of binary data for this file. " + }, + { + "type": "const char *", + "name": "data", + "comments": " The binary data, deflated. " + }, + { + "type": "size_t", + "name": "datalen", + "comments": " The length of the binary data. " + }, + { + "type": "size_t", + "name": "inflatedlen", + "comments": " The length of the binary data after inflation. 
" + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_diff_binary_t", + { + "decl": [ + "GIT_DIFF_BINARY_NONE", + "GIT_DIFF_BINARY_LITERAL", + "GIT_DIFF_BINARY_DELTA" + ], + "type": "enum", + "file": "diff.h", + "line": 457, + "lineto": 466, + "block": "GIT_DIFF_BINARY_NONE\nGIT_DIFF_BINARY_LITERAL\nGIT_DIFF_BINARY_DELTA", + "tdef": "typedef", + "description": " When producing a binary diff, the binary data returned will be\n either the deflated full (\"literal\") contents of the file, or\n the deflated binary delta between the two sides (whichever is\n smaller).", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_DIFF_BINARY_NONE", + "comments": "

There is no binary delta.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_DIFF_BINARY_LITERAL", + "comments": "

The binary data is the literal contents of the file.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_DIFF_BINARY_DELTA", + "comments": "

The binary data is the delta from one side to the other.

\n", + "value": 2 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_diff_delta", + { + "decl": [ + "git_delta_t status", + "uint32_t flags", + "uint16_t similarity", + "uint16_t nfiles", + "git_diff_file old_file", + "git_diff_file new_file" + ], + "type": "struct", + "value": "git_diff_delta", + "file": "diff.h", + "line": 325, + "lineto": 332, + "block": "git_delta_t status\nuint32_t flags\nuint16_t similarity\nuint16_t nfiles\ngit_diff_file old_file\ngit_diff_file new_file", + "tdef": "typedef", + "description": " Description of changes to one entry.", + "comments": "

When iterating over a diff, this will be passed to most callbacks and you can use the contents to understand exactly what has changed.

\n\n

The old_file represents the "from" side of the diff and the new_file represents to "to" side of the diff. What those means depend on the function that was used to generate the diff and will be documented below. You can also use the GIT_DIFF_REVERSE flag to flip it around.

\n\n

Although the two sides of the delta are named "old_file" and "new_file", they actually may correspond to entries that represent a file, a symbolic link, a submodule commit id, or even a tree (if you are tracking type changes or ignored/untracked directories).

\n\n

Under some circumstances, in the name of efficiency, not all fields will be filled in, but we generally try to fill in as much as possible. One example is that the "flags" field may not have either the BINARY or the NOT_BINARY flag set to avoid examining file contents if you do not pass in hunk and/or line callbacks to the diff foreach iteration function. It will just use the git attributes for those files.

\n\n

The similarity score is zero unless you call git_diff_find_similar() which does a similarity analysis of files in the diff. Use that function to do rename and copy detection, and to split heavily modified files in add/delete pairs. After that call, deltas with a status of GIT_DELTA_RENAMED or GIT_DELTA_COPIED will have a similarity score between 0 and 100 indicating how similar the old and new sides are.

\n\n

If you ask git_diff_find_similar to find heavily modified files to break, but to not actually break the records, then GIT_DELTA_MODIFIED records may have a non-zero similarity score if the self-similarity is below the split threshold. To display this value like core Git, invert the score (a la printf("M%03d", 100 - delta->similarity)).

\n", + "fields": [ + { + "type": "git_delta_t", + "name": "status", + "comments": "" + }, + { + "type": "uint32_t", + "name": "flags", + "comments": " git_diff_flag_t values " + }, + { + "type": "uint16_t", + "name": "similarity", + "comments": " for RENAMED and COPIED, value 0-100 " + }, + { + "type": "uint16_t", + "name": "nfiles", + "comments": " number of files in this delta " + }, + { + "type": "git_diff_file", + "name": "old_file", + "comments": "" + }, + { + "type": "git_diff_file", + "name": "new_file", + "comments": "" + } + ], + "used": { + "returns": [ + "git_diff_get_delta", + "git_patch_get_delta", + "git_pathspec_match_list_diff_entry" + ], + "needs": [ + "git_diff_binary_cb", + "git_diff_file_cb", + "git_diff_hunk_cb", + "git_diff_line_cb", + "git_diff_notify_cb", + "git_diff_print_callback__to_buf", + "git_diff_print_callback__to_file_handle" + ] + } + } + ], + [ + "git_diff_file", + { + "decl": [ + "git_oid id", + "const char * path", + "git_off_t size", + "uint32_t flags", + "uint16_t mode" + ], + "type": "struct", + "value": "git_diff_file", + "file": "diff.h", + "line": 281, + "lineto": 287, + "block": "git_oid id\nconst char * path\ngit_off_t size\nuint32_t flags\nuint16_t mode", + "tdef": "typedef", + "description": " Description of one side of a delta.", + "comments": "

Although this is called a "file", it could represent a file, a symbolic link, a submodule commit id, or even a tree (although that only if you are tracking type changes or ignored/untracked directories).

\n\n

The oid is the git_oid of the item. If the entry represents an absent side of a diff (e.g. the old_file of a GIT_DELTA_ADDED delta), then the oid will be zeroes.

\n\n

path is the NUL-terminated path to the entry relative to the working directory of the repository.

\n\n

size is the size of the entry in bytes.

\n\n

flags is a combination of the git_diff_flag_t types

\n\n

mode is, roughly, the stat() st_mode value for the item. This will be restricted to one of the git_filemode_t values.

\n", + "fields": [ + { + "type": "git_oid", + "name": "id", + "comments": "" + }, + { + "type": "const char *", + "name": "path", + "comments": "" + }, + { + "type": "git_off_t", + "name": "size", + "comments": "" + }, + { + "type": "uint32_t", + "name": "flags", + "comments": "" + }, + { + "type": "uint16_t", + "name": "mode", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_checkout_notify_cb", + "git_diff_blob_to_buffer", + "git_diff_blobs", + "git_diff_buffers", + "git_diff_foreach" + ] + } + } + ], + [ + "git_diff_find_options", + { + "decl": [ + "unsigned int version", + "uint32_t flags", + "uint16_t rename_threshold", + "uint16_t rename_from_rewrite_threshold", + "uint16_t copy_threshold", + "uint16_t break_rewrite_threshold", + "size_t rename_limit", + "git_diff_similarity_metric * metric" + ], + "type": "struct", + "value": "git_diff_find_options", + "file": "diff.h", + "line": 681, + "lineto": 707, + "block": "unsigned int version\nuint32_t flags\nuint16_t rename_threshold\nuint16_t rename_from_rewrite_threshold\nuint16_t copy_threshold\nuint16_t break_rewrite_threshold\nsize_t rename_limit\ngit_diff_similarity_metric * metric", + "tdef": "typedef", + "description": " Control behavior of rename and copy detection", + "comments": "

These options mostly mimic parameters that can be passed to git-diff.

\n\n
    \n
  • rename_threshold is the same as the -M option with a value - copy_threshold is the same as the -C option with a value - rename_from_rewrite_threshold matches the top of the -B option - break_rewrite_threshold matches the bottom of the -B option - rename_limit is the maximum number of matches to consider for a particular file. This is a little different from the -l option to regular Git because we will still process up to this many matches before abandoning the search.
  • \n
\n\n

The metric option allows you to plug in a custom similarity metric. Set it to NULL for the default internal metric which is based on sampling hashes of ranges of data in the file. The default metric is a pretty good similarity approximation that should work fairly well for both text and binary data, and is pretty fast with fixed memory overhead.

\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "uint32_t", + "name": "flags", + "comments": " Combination of git_diff_find_t values (default GIT_DIFF_FIND_BY_CONFIG).\n NOTE: if you don't explicitly set this, `diff.renames` could be set\n to false, resulting in `git_diff_find_similar` doing nothing." + }, + { + "type": "uint16_t", + "name": "rename_threshold", + "comments": " Similarity to consider a file renamed (default 50) " + }, + { + "type": "uint16_t", + "name": "rename_from_rewrite_threshold", + "comments": " Similarity of modified to be eligible rename source (default 50) " + }, + { + "type": "uint16_t", + "name": "copy_threshold", + "comments": " Similarity to consider a file a copy (default 50) " + }, + { + "type": "uint16_t", + "name": "break_rewrite_threshold", + "comments": " Similarity to split modify into delete/add pair (default 60) " + }, + { + "type": "size_t", + "name": "rename_limit", + "comments": " Maximum similarity sources to examine for a file (somewhat like\n git-diff's `-l` option or `diff.renameLimit` config) (default 200)" + }, + { + "type": "git_diff_similarity_metric *", + "name": "metric", + "comments": " Pluggable similarity metric; pass NULL to use internal metric " + } + ], + "used": { + "returns": [], + "needs": [ + "git_diff_find_init_options", + "git_diff_find_similar" + ] + } + } + ], + [ + "git_diff_find_t", + { + "decl": [ + "GIT_DIFF_FIND_BY_CONFIG", + "GIT_DIFF_FIND_RENAMES", + "GIT_DIFF_FIND_RENAMES_FROM_REWRITES", + "GIT_DIFF_FIND_COPIES", + "GIT_DIFF_FIND_COPIES_FROM_UNMODIFIED", + "GIT_DIFF_FIND_REWRITES", + "GIT_DIFF_BREAK_REWRITES", + "GIT_DIFF_FIND_AND_BREAK_REWRITES", + "GIT_DIFF_FIND_FOR_UNTRACKED", + "GIT_DIFF_FIND_ALL", + "GIT_DIFF_FIND_IGNORE_LEADING_WHITESPACE", + "GIT_DIFF_FIND_IGNORE_WHITESPACE", + "GIT_DIFF_FIND_DONT_IGNORE_WHITESPACE", + "GIT_DIFF_FIND_EXACT_MATCH_ONLY", + "GIT_DIFF_BREAK_REWRITES_FOR_RENAMES_ONLY", + 
"GIT_DIFF_FIND_REMOVE_UNMODIFIED" + ], + "type": "enum", + "file": "diff.h", + "line": 575, + "lineto": 644, + "block": "GIT_DIFF_FIND_BY_CONFIG\nGIT_DIFF_FIND_RENAMES\nGIT_DIFF_FIND_RENAMES_FROM_REWRITES\nGIT_DIFF_FIND_COPIES\nGIT_DIFF_FIND_COPIES_FROM_UNMODIFIED\nGIT_DIFF_FIND_REWRITES\nGIT_DIFF_BREAK_REWRITES\nGIT_DIFF_FIND_AND_BREAK_REWRITES\nGIT_DIFF_FIND_FOR_UNTRACKED\nGIT_DIFF_FIND_ALL\nGIT_DIFF_FIND_IGNORE_LEADING_WHITESPACE\nGIT_DIFF_FIND_IGNORE_WHITESPACE\nGIT_DIFF_FIND_DONT_IGNORE_WHITESPACE\nGIT_DIFF_FIND_EXACT_MATCH_ONLY\nGIT_DIFF_BREAK_REWRITES_FOR_RENAMES_ONLY\nGIT_DIFF_FIND_REMOVE_UNMODIFIED", + "tdef": "typedef", + "description": " Flags to control the behavior of diff rename/copy detection.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_DIFF_FIND_BY_CONFIG", + "comments": "

Obey diff.renames. Overridden by any other GIT_DIFF_FIND_... flag.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_RENAMES", + "comments": "

Look for renames? (--find-renames)

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_RENAMES_FROM_REWRITES", + "comments": "

Consider old side of MODIFIED for renames? (--break-rewrites=N)

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_COPIES", + "comments": "

Look for copies? (a la --find-copies).

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_COPIES_FROM_UNMODIFIED", + "comments": "

Consider UNMODIFIED as copy sources? (--find-copies-harder).

\n\n

For this to work correctly, use GIT_DIFF_INCLUDE_UNMODIFIED when\n the initial git_diff is being generated.

\n", + "value": 8 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_REWRITES", + "comments": "

Mark significant rewrites for split (--break-rewrites=/M)

\n", + "value": 16 + }, + { + "type": "int", + "name": "GIT_DIFF_BREAK_REWRITES", + "comments": "

Actually split large rewrites into delete/add pairs

\n", + "value": 32 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_AND_BREAK_REWRITES", + "comments": "

Mark rewrites for split and break into delete/add pairs

\n", + "value": 48 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_FOR_UNTRACKED", + "comments": "

Find renames/copies for UNTRACKED items in working directory.

\n\n

For this to work correctly, use GIT_DIFF_INCLUDE_UNTRACKED when the\n initial git_diff is being generated (and obviously the diff must\n be against the working directory for this to make sense).

\n", + "value": 64 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_ALL", + "comments": "

Turn on all finding features.

\n", + "value": 255 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_IGNORE_LEADING_WHITESPACE", + "comments": "

Measure similarity ignoring leading whitespace (default)

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_IGNORE_WHITESPACE", + "comments": "

Measure similarity ignoring all whitespace

\n", + "value": 4096 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_DONT_IGNORE_WHITESPACE", + "comments": "

Measure similarity including all data

\n", + "value": 8192 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_EXACT_MATCH_ONLY", + "comments": "

Measure similarity only by comparing SHAs (fast and cheap)

\n", + "value": 16384 + }, + { + "type": "int", + "name": "GIT_DIFF_BREAK_REWRITES_FOR_RENAMES_ONLY", + "comments": "

Do not break rewrites unless they contribute to a rename.

\n\n

Normally, GIT_DIFF_FIND_AND_BREAK_REWRITES will measure the self-\n similarity of modified files and split the ones that have changed a\n lot into a DELETE / ADD pair. Then the sides of that pair will be\n considered candidates for rename and copy detection.

\n\n

If you add this flag in and the split pair is not used for an\n actual rename or copy, then the modified record will be restored to\n a regular MODIFIED record instead of being split.

\n", + "value": 32768 + }, + { + "type": "int", + "name": "GIT_DIFF_FIND_REMOVE_UNMODIFIED", + "comments": "

Remove any UNMODIFIED deltas after find_similar is done.

\n\n

Using GIT_DIFF_FIND_COPIES_FROM_UNMODIFIED to emulate the\n --find-copies-harder behavior requires building a diff with the\n GIT_DIFF_INCLUDE_UNMODIFIED flag. If you do not want UNMODIFIED\n records in the final result, pass this flag to have them removed.

\n", + "value": 65536 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_diff_flag_t", + { + "decl": [ + "GIT_DIFF_FLAG_BINARY", + "GIT_DIFF_FLAG_NOT_BINARY", + "GIT_DIFF_FLAG_VALID_ID", + "GIT_DIFF_FLAG_EXISTS" + ], + "type": "enum", + "file": "diff.h", + "line": 229, + "lineto": 234, + "block": "GIT_DIFF_FLAG_BINARY\nGIT_DIFF_FLAG_NOT_BINARY\nGIT_DIFF_FLAG_VALID_ID\nGIT_DIFF_FLAG_EXISTS", + "tdef": "typedef", + "description": " Flags for the delta object and the file objects on each side.", + "comments": "

These flags are used for both the flags value of the git_diff_delta and the flags for the git_diff_file objects representing the old and new sides of the delta. Values outside of this public range should be considered reserved for internal or future use.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_DIFF_FLAG_BINARY", + "comments": "

file(s) treated as binary data

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_DIFF_FLAG_NOT_BINARY", + "comments": "

file(s) treated as text data

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_DIFF_FLAG_VALID_ID", + "comments": "

id value is known correct

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_DIFF_FLAG_EXISTS", + "comments": "

file exists at this side of the delta

\n", + "value": 8 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_diff_format_email_flags_t", + { + "decl": [ + "GIT_DIFF_FORMAT_EMAIL_NONE", + "GIT_DIFF_FORMAT_EMAIL_EXCLUDE_SUBJECT_PATCH_MARKER" + ], + "type": "enum", + "file": "diff.h", + "line": 1260, + "lineto": 1267, + "block": "GIT_DIFF_FORMAT_EMAIL_NONE\nGIT_DIFF_FORMAT_EMAIL_EXCLUDE_SUBJECT_PATCH_MARKER", + "tdef": "typedef", + "description": " Formatting options for diff e-mail generation", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_DIFF_FORMAT_EMAIL_NONE", + "comments": "

Normal patch, the default

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_DIFF_FORMAT_EMAIL_EXCLUDE_SUBJECT_PATCH_MARKER", + "comments": "

Don't insert "[PATCH]" in the subject header

\n", + "value": 1 + } + ], + "used": { + "returns": [], + "needs": [ + "git_diff_commit_as_email" + ] + } + } + ], + [ + "git_diff_format_email_options", + { + "decl": [ + "unsigned int version", + "git_diff_format_email_flags_t flags", + "size_t patch_no", + "size_t total_patches", + "const git_oid * id", + "const char * summary", + "const char * body", + "const git_signature * author" + ], + "type": "struct", + "value": "git_diff_format_email_options", + "file": "diff.h", + "line": 1272, + "lineto": 1294, + "block": "unsigned int version\ngit_diff_format_email_flags_t flags\nsize_t patch_no\nsize_t total_patches\nconst git_oid * id\nconst char * summary\nconst char * body\nconst git_signature * author", + "tdef": "typedef", + "description": " Options for controlling the formatting of the generated e-mail.", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "git_diff_format_email_flags_t", + "name": "flags", + "comments": "" + }, + { + "type": "size_t", + "name": "patch_no", + "comments": " This patch number " + }, + { + "type": "size_t", + "name": "total_patches", + "comments": " Total number of patches in this series " + }, + { + "type": "const git_oid *", + "name": "id", + "comments": " id to use for the commit " + }, + { + "type": "const char *", + "name": "summary", + "comments": " Summary of the change " + }, + { + "type": "const char *", + "name": "body", + "comments": " Commit message's body " + }, + { + "type": "const git_signature *", + "name": "author", + "comments": " Author of the change " + } + ], + "used": { + "returns": [], + "needs": [ + "git_diff_format_email", + "git_diff_format_email_init_options" + ] + } + } + ], + [ + "git_diff_format_t", + { + "decl": [ + "GIT_DIFF_FORMAT_PATCH", + "GIT_DIFF_FORMAT_PATCH_HEADER", + "GIT_DIFF_FORMAT_RAW", + "GIT_DIFF_FORMAT_NAME_ONLY", + "GIT_DIFF_FORMAT_NAME_STATUS" + ], + "type": "enum", + "file": "diff.h", + "line": 1023, + "lineto": 
1029, + "block": "GIT_DIFF_FORMAT_PATCH\nGIT_DIFF_FORMAT_PATCH_HEADER\nGIT_DIFF_FORMAT_RAW\nGIT_DIFF_FORMAT_NAME_ONLY\nGIT_DIFF_FORMAT_NAME_STATUS", + "tdef": "typedef", + "description": " Possible output formats for diff data", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_DIFF_FORMAT_PATCH", + "comments": "

full git diff

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_DIFF_FORMAT_PATCH_HEADER", + "comments": "

just the file headers of patch

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_DIFF_FORMAT_RAW", + "comments": "

like git diff --raw

\n", + "value": 3 + }, + { + "type": "int", + "name": "GIT_DIFF_FORMAT_NAME_ONLY", + "comments": "

like git diff --name-only

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_DIFF_FORMAT_NAME_STATUS", + "comments": "

like git diff --name-status

\n", + "value": 5 + } + ], + "used": { + "returns": [], + "needs": [ + "git_diff_print" + ] + } + } + ], + [ + "git_diff_hunk", + { + "decl": [ + "int old_start", + "int old_lines", + "int new_start", + "int new_lines", + "size_t header_len", + "char [128] header" + ], + "type": "struct", + "value": "git_diff_hunk", + "file": "diff.h", + "line": 501, + "lineto": 508, + "block": "int old_start\nint old_lines\nint new_start\nint new_lines\nsize_t header_len\nchar [128] header", + "tdef": "typedef", + "description": " Structure describing a hunk of a diff.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "old_start", + "comments": " Starting line number in old_file " + }, + { + "type": "int", + "name": "old_lines", + "comments": " Number of lines in old_file " + }, + { + "type": "int", + "name": "new_start", + "comments": " Starting line number in new_file " + }, + { + "type": "int", + "name": "new_lines", + "comments": " Number of lines in new_file " + }, + { + "type": "size_t", + "name": "header_len", + "comments": " Number of bytes in header text " + }, + { + "type": "char [128]", + "name": "header", + "comments": " Header text, NUL-byte terminated " + } + ], + "used": { + "returns": [], + "needs": [ + "git_diff_blob_to_buffer", + "git_diff_blobs", + "git_diff_buffers", + "git_diff_foreach", + "git_diff_hunk_cb", + "git_diff_line_cb", + "git_diff_print_callback__to_buf", + "git_diff_print_callback__to_file_handle", + "git_patch_get_hunk" + ] + } + } + ], + [ + "git_diff_line", + { + "decl": [ + "char origin", + "int old_lineno", + "int new_lineno", + "int num_lines", + "size_t content_len", + "git_off_t content_offset", + "const char * content" + ], + "type": "struct", + "value": "git_diff_line", + "file": "diff.h", + "line": 548, + "lineto": 556, + "block": "char origin\nint old_lineno\nint new_lineno\nint num_lines\nsize_t content_len\ngit_off_t content_offset\nconst char * content", + "tdef": "typedef", + "description": " Structure describing a 
line (or data span) of a diff.", + "comments": "", + "fields": [ + { + "type": "char", + "name": "origin", + "comments": " A git_diff_line_t value " + }, + { + "type": "int", + "name": "old_lineno", + "comments": " Line number in old file or -1 for added line " + }, + { + "type": "int", + "name": "new_lineno", + "comments": " Line number in new file or -1 for deleted line " + }, + { + "type": "int", + "name": "num_lines", + "comments": " Number of newline characters in content " + }, + { + "type": "size_t", + "name": "content_len", + "comments": " Number of bytes of data " + }, + { + "type": "git_off_t", + "name": "content_offset", + "comments": " Offset in the original file to the content " + }, + { + "type": "const char *", + "name": "content", + "comments": " Pointer to diff text, not NUL-byte terminated " + } + ], + "used": { + "returns": [], + "needs": [ + "git_diff_blob_to_buffer", + "git_diff_blobs", + "git_diff_buffers", + "git_diff_foreach", + "git_diff_line_cb", + "git_diff_print", + "git_diff_print_callback__to_buf", + "git_diff_print_callback__to_file_handle", + "git_patch_get_line_in_hunk", + "git_patch_print" + ] + } + } + ], + [ + "git_diff_line_t", + { + "decl": [ + "GIT_DIFF_LINE_CONTEXT", + "GIT_DIFF_LINE_ADDITION", + "GIT_DIFF_LINE_DELETION", + "GIT_DIFF_LINE_CONTEXT_EOFNL", + "GIT_DIFF_LINE_ADD_EOFNL", + "GIT_DIFF_LINE_DEL_EOFNL", + "GIT_DIFF_LINE_FILE_HDR", + "GIT_DIFF_LINE_HUNK_HDR", + "GIT_DIFF_LINE_BINARY" + ], + "type": "enum", + "file": "diff.h", + "line": 527, + "lineto": 543, + "block": "GIT_DIFF_LINE_CONTEXT\nGIT_DIFF_LINE_ADDITION\nGIT_DIFF_LINE_DELETION\nGIT_DIFF_LINE_CONTEXT_EOFNL\nGIT_DIFF_LINE_ADD_EOFNL\nGIT_DIFF_LINE_DEL_EOFNL\nGIT_DIFF_LINE_FILE_HDR\nGIT_DIFF_LINE_HUNK_HDR\nGIT_DIFF_LINE_BINARY", + "tdef": "typedef", + "description": " Line origin constants.", + "comments": "

These values describe where a line came from and will be passed to the git_diff_line_cb when iterating over a diff. There are some special origin constants at the end that are used for the text output callbacks to demarcate lines that are actually part of the file or hunk headers.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_DIFF_LINE_CONTEXT", + "comments": "", + "value": 32 + }, + { + "type": "int", + "name": "GIT_DIFF_LINE_ADDITION", + "comments": "", + "value": 43 + }, + { + "type": "int", + "name": "GIT_DIFF_LINE_DELETION", + "comments": "", + "value": 45 + }, + { + "type": "int", + "name": "GIT_DIFF_LINE_CONTEXT_EOFNL", + "comments": "

Both files have no LF at end

\n", + "value": 61 + }, + { + "type": "int", + "name": "GIT_DIFF_LINE_ADD_EOFNL", + "comments": "

Old has no LF at end, new does

\n", + "value": 62 + }, + { + "type": "int", + "name": "GIT_DIFF_LINE_DEL_EOFNL", + "comments": "

Old has LF at end, new does not

\n", + "value": 60 + }, + { + "type": "int", + "name": "GIT_DIFF_LINE_FILE_HDR", + "comments": "", + "value": 70 + }, + { + "type": "int", + "name": "GIT_DIFF_LINE_HUNK_HDR", + "comments": "", + "value": 72 + }, + { + "type": "int", + "name": "GIT_DIFF_LINE_BINARY", + "comments": "

For "Binary files x and y differ"

\n", + "value": 66 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_diff_option_t", + { + "decl": [ + "GIT_DIFF_NORMAL", + "GIT_DIFF_REVERSE", + "GIT_DIFF_INCLUDE_IGNORED", + "GIT_DIFF_RECURSE_IGNORED_DIRS", + "GIT_DIFF_INCLUDE_UNTRACKED", + "GIT_DIFF_RECURSE_UNTRACKED_DIRS", + "GIT_DIFF_INCLUDE_UNMODIFIED", + "GIT_DIFF_INCLUDE_TYPECHANGE", + "GIT_DIFF_INCLUDE_TYPECHANGE_TREES", + "GIT_DIFF_IGNORE_FILEMODE", + "GIT_DIFF_IGNORE_SUBMODULES", + "GIT_DIFF_IGNORE_CASE", + "GIT_DIFF_INCLUDE_CASECHANGE", + "GIT_DIFF_DISABLE_PATHSPEC_MATCH", + "GIT_DIFF_SKIP_BINARY_CHECK", + "GIT_DIFF_ENABLE_FAST_UNTRACKED_DIRS", + "GIT_DIFF_UPDATE_INDEX", + "GIT_DIFF_INCLUDE_UNREADABLE", + "GIT_DIFF_INCLUDE_UNREADABLE_AS_UNTRACKED", + "GIT_DIFF_FORCE_TEXT", + "GIT_DIFF_FORCE_BINARY", + "GIT_DIFF_IGNORE_WHITESPACE", + "GIT_DIFF_IGNORE_WHITESPACE_CHANGE", + "GIT_DIFF_IGNORE_WHITESPACE_EOL", + "GIT_DIFF_SHOW_UNTRACKED_CONTENT", + "GIT_DIFF_SHOW_UNMODIFIED", + "GIT_DIFF_PATIENCE", + "GIT_DIFF_MINIMAL", + "GIT_DIFF_SHOW_BINARY" + ], + "type": "enum", + "file": "diff.h", + "line": 72, + "lineto": 209, + "block": "GIT_DIFF_NORMAL\nGIT_DIFF_REVERSE\nGIT_DIFF_INCLUDE_IGNORED\nGIT_DIFF_RECURSE_IGNORED_DIRS\nGIT_DIFF_INCLUDE_UNTRACKED\nGIT_DIFF_RECURSE_UNTRACKED_DIRS\nGIT_DIFF_INCLUDE_UNMODIFIED\nGIT_DIFF_INCLUDE_TYPECHANGE\nGIT_DIFF_INCLUDE_TYPECHANGE_TREES\nGIT_DIFF_IGNORE_FILEMODE\nGIT_DIFF_IGNORE_SUBMODULES\nGIT_DIFF_IGNORE_CASE\nGIT_DIFF_INCLUDE_CASECHANGE\nGIT_DIFF_DISABLE_PATHSPEC_MATCH\nGIT_DIFF_SKIP_BINARY_CHECK\nGIT_DIFF_ENABLE_FAST_UNTRACKED_DIRS\nGIT_DIFF_UPDATE_INDEX\nGIT_DIFF_INCLUDE_UNREADABLE\nGIT_DIFF_INCLUDE_UNREADABLE_AS_UNTRACKED\nGIT_DIFF_FORCE_TEXT\nGIT_DIFF_FORCE_BINARY\nGIT_DIFF_IGNORE_WHITESPACE\nGIT_DIFF_IGNORE_WHITESPACE_CHANGE\nGIT_DIFF_IGNORE_WHITESPACE_EOL\nGIT_DIFF_SHOW_UNTRACKED_CONTENT\nGIT_DIFF_SHOW_UNMODIFIED\nGIT_DIFF_PATIENCE\nGIT_DIFF_MINIMAL\nGIT_DIFF_SHOW_BINARY", + "tdef": "typedef", + "description": " Flags for diff options. 
A combination of these flags can be passed\n in via the `flags` value in the `git_diff_options`.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_DIFF_NORMAL", + "comments": "

Normal diff, the default

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_DIFF_REVERSE", + "comments": "

Reverse the sides of the diff

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_DIFF_INCLUDE_IGNORED", + "comments": "

Include ignored files in the diff

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_DIFF_RECURSE_IGNORED_DIRS", + "comments": "

Even with GIT_DIFF_INCLUDE_IGNORED, an entire ignored directory\n will be marked with only a single entry in the diff; this flag\n adds all files under the directory as IGNORED entries, too.

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_DIFF_INCLUDE_UNTRACKED", + "comments": "

Include untracked files in the diff

\n", + "value": 8 + }, + { + "type": "int", + "name": "GIT_DIFF_RECURSE_UNTRACKED_DIRS", + "comments": "

Even with GIT_DIFF_INCLUDE_UNTRACKED, an entire untracked\n directory will be marked with only a single entry in the diff\n (a la what core Git does in git status); this flag adds all\n files under untracked directories as UNTRACKED entries, too.

\n", + "value": 16 + }, + { + "type": "int", + "name": "GIT_DIFF_INCLUDE_UNMODIFIED", + "comments": "

Include unmodified files in the diff

\n", + "value": 32 + }, + { + "type": "int", + "name": "GIT_DIFF_INCLUDE_TYPECHANGE", + "comments": "

Normally, a type change between files will be converted into a\n DELETED record for the old and an ADDED record for the new; this\n options enabled the generation of TYPECHANGE delta records.

\n", + "value": 64 + }, + { + "type": "int", + "name": "GIT_DIFF_INCLUDE_TYPECHANGE_TREES", + "comments": "

Even with GIT_DIFF_INCLUDE_TYPECHANGE, blob->tree changes still\n generally show as a DELETED blob. This flag tries to correctly\n label blob->tree transitions as TYPECHANGE records with new_file's\n mode set to tree. Note: the tree SHA will not be available.

\n", + "value": 128 + }, + { + "type": "int", + "name": "GIT_DIFF_IGNORE_FILEMODE", + "comments": "

Ignore file mode changes

\n", + "value": 256 + }, + { + "type": "int", + "name": "GIT_DIFF_IGNORE_SUBMODULES", + "comments": "

Treat all submodules as unmodified

\n", + "value": 512 + }, + { + "type": "int", + "name": "GIT_DIFF_IGNORE_CASE", + "comments": "

Use case insensitive filename comparisons

\n", + "value": 1024 + }, + { + "type": "int", + "name": "GIT_DIFF_INCLUDE_CASECHANGE", + "comments": "

May be combined with GIT_DIFF_IGNORE_CASE to specify that a file\n that has changed case will be returned as an add/delete pair.

\n", + "value": 2048 + }, + { + "type": "int", + "name": "GIT_DIFF_DISABLE_PATHSPEC_MATCH", + "comments": "

If the pathspec is set in the diff options, this flags indicates\n that the paths will be treated as literal paths instead of\n fnmatch patterns. Each path in the list must either be a full\n path to a file or a directory. (A trailing slash indicates that\n the path will only match a directory). If a directory is\n specified, all children will be included.

\n", + "value": 4096 + }, + { + "type": "int", + "name": "GIT_DIFF_SKIP_BINARY_CHECK", + "comments": "

Disable updating of the binary flag in delta records. This is\n useful when iterating over a diff if you don't need hunk and data\n callbacks and want to avoid having to load file completely.

\n", + "value": 8192 + }, + { + "type": "int", + "name": "GIT_DIFF_ENABLE_FAST_UNTRACKED_DIRS", + "comments": "

When diff finds an untracked directory, to match the behavior of\n core Git, it scans the contents for IGNORED and UNTRACKED files.\n If all contents are IGNORED, then the directory is IGNORED; if\n any contents are not IGNORED, then the directory is UNTRACKED.\n This is extra work that may not matter in many cases. This flag\n turns off that scan and immediately labels an untracked directory\n as UNTRACKED (changing the behavior to not match core Git).

\n", + "value": 16384 + }, + { + "type": "int", + "name": "GIT_DIFF_UPDATE_INDEX", + "comments": "

When diff finds a file in the working directory with stat\n information different from the index, but the OID ends up being the\n same, write the correct stat information into the index. Note:\n without this flag, diff will always leave the index untouched.

\n", + "value": 32768 + }, + { + "type": "int", + "name": "GIT_DIFF_INCLUDE_UNREADABLE", + "comments": "

Include unreadable files in the diff

\n", + "value": 65536 + }, + { + "type": "int", + "name": "GIT_DIFF_INCLUDE_UNREADABLE_AS_UNTRACKED", + "comments": "

Include unreadable files in the diff

\n", + "value": 131072 + }, + { + "type": "int", + "name": "GIT_DIFF_FORCE_TEXT", + "comments": "

Treat all files as text, disabling binary attributes \n&\n detection

\n", + "value": 1048576 + }, + { + "type": "int", + "name": "GIT_DIFF_FORCE_BINARY", + "comments": "

Treat all files as binary, disabling text diffs

\n", + "value": 2097152 + }, + { + "type": "int", + "name": "GIT_DIFF_IGNORE_WHITESPACE", + "comments": "

Ignore all whitespace

\n", + "value": 4194304 + }, + { + "type": "int", + "name": "GIT_DIFF_IGNORE_WHITESPACE_CHANGE", + "comments": "

Ignore changes in amount of whitespace

\n", + "value": 8388608 + }, + { + "type": "int", + "name": "GIT_DIFF_IGNORE_WHITESPACE_EOL", + "comments": "

Ignore whitespace at end of line

\n", + "value": 16777216 + }, + { + "type": "int", + "name": "GIT_DIFF_SHOW_UNTRACKED_CONTENT", + "comments": "

When generating patch text, include the content of untracked\n files. This automatically turns on GIT_DIFF_INCLUDE_UNTRACKED but\n it does not turn on GIT_DIFF_RECURSE_UNTRACKED_DIRS. Add that\n flag if you want the content of every single UNTRACKED file.

\n", + "value": 33554432 + }, + { + "type": "int", + "name": "GIT_DIFF_SHOW_UNMODIFIED", + "comments": "

When generating output, include the names of unmodified files if\n they are included in the git_diff. Normally these are skipped in\n the formats that list files (e.g. name-only, name-status, raw).\n Even with this, these will not be included in patch format.

\n", + "value": 67108864 + }, + { + "type": "int", + "name": "GIT_DIFF_PATIENCE", + "comments": "

Use the "patience diff" algorithm

\n", + "value": 268435456 + }, + { + "type": "int", + "name": "GIT_DIFF_MINIMAL", + "comments": "

Take extra time to find minimal diff

\n", + "value": 536870912 + }, + { + "type": "int", + "name": "GIT_DIFF_SHOW_BINARY", + "comments": "

Include the necessary deflate / delta information so that git-apply\n can apply given diff information to binary files.

\n", + "value": 1073741824 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_diff_options", + { + "decl": [ + "unsigned int version", + "uint32_t flags", + "git_submodule_ignore_t ignore_submodules", + "git_strarray pathspec", + "git_diff_notify_cb notify_cb", + "git_diff_progress_cb progress_cb", + "void * payload", + "uint32_t context_lines", + "uint32_t interhunk_lines", + "uint16_t id_abbrev", + "git_off_t max_size", + "const char * old_prefix", + "const char * new_prefix" + ], + "type": "struct", + "value": "git_diff_options", + "file": "diff.h", + "line": 396, + "lineto": 416, + "block": "unsigned int version\nuint32_t flags\ngit_submodule_ignore_t ignore_submodules\ngit_strarray pathspec\ngit_diff_notify_cb notify_cb\ngit_diff_progress_cb progress_cb\nvoid * payload\nuint32_t context_lines\nuint32_t interhunk_lines\nuint16_t id_abbrev\ngit_off_t max_size\nconst char * old_prefix\nconst char * new_prefix", + "tdef": "typedef", + "description": " Structure describing options about how the diff should be executed.", + "comments": "

Setting all values of the structure to zero will yield the default values. Similarly, passing NULL for the options structure will give the defaults. The default values are marked below.

\n\n
    \n
  • flags is a combination of the git_diff_option_t values above - context_lines is the number of unchanged lines that define the boundary of a hunk (and to display before and after) - interhunk_lines is the maximum number of unchanged lines between hunk boundaries before the hunks will be merged into a one. - old_prefix is the virtual "directory" to prefix to old file names in hunk headers (default "a") - new_prefix is the virtual "directory" to prefix to new file names in hunk headers (default "b") - pathspec is an array of paths / fnmatch patterns to constrain diff - max_size is a file size (in bytes) above which a blob will be marked as binary automatically; pass a negative value to disable. - notify_cb is an optional callback function, notifying the consumer of changes to the diff as new deltas are added. - progress_cb is an optional callback function, notifying the consumer of which files are being examined as the diff is generated. - payload is the payload to pass to the callback functions. - ignore_submodules overrides the submodule ignore setting for all submodules in the diff.
  • \n
\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": " version for the struct " + }, + { + "type": "uint32_t", + "name": "flags", + "comments": " defaults to GIT_DIFF_NORMAL " + }, + { + "type": "git_submodule_ignore_t", + "name": "ignore_submodules", + "comments": " submodule ignore rule " + }, + { + "type": "git_strarray", + "name": "pathspec", + "comments": " defaults to include all paths " + }, + { + "type": "git_diff_notify_cb", + "name": "notify_cb", + "comments": "" + }, + { + "type": "git_diff_progress_cb", + "name": "progress_cb", + "comments": "" + }, + { + "type": "void *", + "name": "payload", + "comments": "" + }, + { + "type": "uint32_t", + "name": "context_lines", + "comments": " defaults to 3 " + }, + { + "type": "uint32_t", + "name": "interhunk_lines", + "comments": " defaults to 0 " + }, + { + "type": "uint16_t", + "name": "id_abbrev", + "comments": " default 'core.abbrev' or 7 if unset " + }, + { + "type": "git_off_t", + "name": "max_size", + "comments": " defaults to 512MB " + }, + { + "type": "const char *", + "name": "old_prefix", + "comments": " defaults to \"a\" " + }, + { + "type": "const char *", + "name": "new_prefix", + "comments": " defaults to \"b\" " + } + ], + "used": { + "returns": [], + "needs": [ + "git_diff_blob_to_buffer", + "git_diff_blobs", + "git_diff_buffers", + "git_diff_commit_as_email", + "git_diff_index_to_index", + "git_diff_index_to_workdir", + "git_diff_init_options", + "git_diff_tree_to_index", + "git_diff_tree_to_tree", + "git_diff_tree_to_workdir", + "git_diff_tree_to_workdir_with_index", + "git_patch_from_blob_and_buffer", + "git_patch_from_blobs", + "git_patch_from_buffers" + ] + } + } + ], + [ + "git_diff_similarity_metric", + { + "decl": [ + "int (*)(void **, const git_diff_file *, const char *, void *) file_signature", + "int (*)(void **, const git_diff_file *, const char *, size_t, void *) buffer_signature", + "void (*)(void *, void *) free_signature", + "int (*)(int *, void *, 
void *, void *) similarity", + "void * payload" + ], + "type": "struct", + "value": "git_diff_similarity_metric", + "file": "diff.h", + "line": 649, + "lineto": 659, + "block": "int (*)(void **, const git_diff_file *, const char *, void *) file_signature\nint (*)(void **, const git_diff_file *, const char *, size_t, void *) buffer_signature\nvoid (*)(void *, void *) free_signature\nint (*)(int *, void *, void *, void *) similarity\nvoid * payload", + "tdef": "typedef", + "description": " Pluggable similarity metric", + "comments": "", + "fields": [ + { + "type": "int (*)(void **, const git_diff_file *, const char *, void *)", + "name": "file_signature", + "comments": "" + }, + { + "type": "int (*)(void **, const git_diff_file *, const char *, size_t, void *)", + "name": "buffer_signature", + "comments": "" + }, + { + "type": "void (*)(void *, void *)", + "name": "free_signature", + "comments": "" + }, + { + "type": "int (*)(int *, void *, void *, void *)", + "name": "similarity", + "comments": "" + }, + { + "type": "void *", + "name": "payload", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_diff_stats", + { + "decl": "git_diff_stats", + "type": "struct", + "value": "git_diff_stats", + "file": "diff.h", + "line": 1174, + "lineto": 1174, + "tdef": "typedef", + "description": " This is an opaque structure which is allocated by `git_diff_get_stats`.\n You are responsible for releasing the object memory when done, using the\n `git_diff_stats_free()` function.", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_diff_get_stats", + "git_diff_stats_deletions", + "git_diff_stats_files_changed", + "git_diff_stats_free", + "git_diff_stats_insertions", + "git_diff_stats_to_buf" + ] + } + } + ], + [ + "git_diff_stats_format_t", + { + "decl": [ + "GIT_DIFF_STATS_NONE", + "GIT_DIFF_STATS_FULL", + "GIT_DIFF_STATS_SHORT", + "GIT_DIFF_STATS_NUMBER", + "GIT_DIFF_STATS_INCLUDE_SUMMARY" + ], + "type": "enum", + "file": 
"diff.h", + "line": 1179, + "lineto": 1194, + "block": "GIT_DIFF_STATS_NONE\nGIT_DIFF_STATS_FULL\nGIT_DIFF_STATS_SHORT\nGIT_DIFF_STATS_NUMBER\nGIT_DIFF_STATS_INCLUDE_SUMMARY", + "tdef": "typedef", + "description": " Formatting options for diff stats", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_DIFF_STATS_NONE", + "comments": "

No stats

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_DIFF_STATS_FULL", + "comments": "

Full statistics, equivalent of --stat

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_DIFF_STATS_SHORT", + "comments": "

Short statistics, equivalent of --shortstat

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_DIFF_STATS_NUMBER", + "comments": "

Number statistics, equivalent of --numstat

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_DIFF_STATS_INCLUDE_SUMMARY", + "comments": "

Extended header information such as creations, renames and mode changes, equivalent of --summary

\n", + "value": 8 + } + ], + "used": { + "returns": [], + "needs": [ + "git_diff_stats_to_buf" + ] + } + } + ], + [ + "git_direction", + { + "decl": [ + "GIT_DIRECTION_FETCH", + "GIT_DIRECTION_PUSH" + ], + "type": "enum", + "file": "net.h", + "line": 31, + "lineto": 34, + "block": "GIT_DIRECTION_FETCH\nGIT_DIRECTION_PUSH", + "tdef": "typedef", + "description": " Direction of the connection.", + "comments": "

We need this because we need to know whether we should call git-upload-pack or git-receive-pack on the remote end when get_refs gets called.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_DIRECTION_FETCH", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_DIRECTION_PUSH", + "comments": "", + "value": 1 + } + ], + "used": { + "returns": [ + "git_refspec_direction" + ], + "needs": [ + "git_remote_connect" + ] + } + } + ], + [ + "git_error", + { + "decl": [ + "char * message", + "int klass" + ], + "type": "struct", + "value": "git_error", + "file": "errors.h", + "line": 64, + "lineto": 67, + "block": "char * message\nint klass", + "tdef": "typedef", + "description": " Structure to store extra details of the last error that occurred.", + "comments": "

This is kept on a per-thread basis if GIT_THREADS was defined when the library was build, otherwise one is kept globally for the library

\n", + "fields": [ + { + "type": "char *", + "name": "message", + "comments": "" + }, + { + "type": "int", + "name": "klass", + "comments": "" + } + ], + "used": { + "returns": [ + "giterr_last" + ], + "needs": [] + } + } + ], + [ + "git_error_code", + { + "decl": [ + "GIT_OK", + "GIT_ERROR", + "GIT_ENOTFOUND", + "GIT_EEXISTS", + "GIT_EAMBIGUOUS", + "GIT_EBUFS", + "GIT_EUSER", + "GIT_EBAREREPO", + "GIT_EUNBORNBRANCH", + "GIT_EUNMERGED", + "GIT_ENONFASTFORWARD", + "GIT_EINVALIDSPEC", + "GIT_ECONFLICT", + "GIT_ELOCKED", + "GIT_EMODIFIED", + "GIT_EAUTH", + "GIT_ECERTIFICATE", + "GIT_EAPPLIED", + "GIT_EPEEL", + "GIT_EEOF", + "GIT_EINVALID", + "GIT_EUNCOMMITTED", + "GIT_EDIRECTORY", + "GIT_EMERGECONFLICT", + "GIT_PASSTHROUGH", + "GIT_ITEROVER" + ], + "type": "enum", + "file": "errors.h", + "line": 21, + "lineto": 56, + "block": "GIT_OK\nGIT_ERROR\nGIT_ENOTFOUND\nGIT_EEXISTS\nGIT_EAMBIGUOUS\nGIT_EBUFS\nGIT_EUSER\nGIT_EBAREREPO\nGIT_EUNBORNBRANCH\nGIT_EUNMERGED\nGIT_ENONFASTFORWARD\nGIT_EINVALIDSPEC\nGIT_ECONFLICT\nGIT_ELOCKED\nGIT_EMODIFIED\nGIT_EAUTH\nGIT_ECERTIFICATE\nGIT_EAPPLIED\nGIT_EPEEL\nGIT_EEOF\nGIT_EINVALID\nGIT_EUNCOMMITTED\nGIT_EDIRECTORY\nGIT_EMERGECONFLICT\nGIT_PASSTHROUGH\nGIT_ITEROVER", + "tdef": "typedef", + "description": " Generic return codes ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_OK", + "comments": "

No error

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_ERROR", + "comments": "

Generic error

\n", + "value": -1 + }, + { + "type": "int", + "name": "GIT_ENOTFOUND", + "comments": "

Requested object could not be found

\n", + "value": -3 + }, + { + "type": "int", + "name": "GIT_EEXISTS", + "comments": "

Object exists preventing operation

\n", + "value": -4 + }, + { + "type": "int", + "name": "GIT_EAMBIGUOUS", + "comments": "

More than one object matches

\n", + "value": -5 + }, + { + "type": "int", + "name": "GIT_EBUFS", + "comments": "

Output buffer too short to hold data

\n", + "value": -6 + }, + { + "type": "int", + "name": "GIT_EUSER", + "comments": "", + "value": -7 + }, + { + "type": "int", + "name": "GIT_EBAREREPO", + "comments": "

Operation not allowed on bare repository

\n", + "value": -8 + }, + { + "type": "int", + "name": "GIT_EUNBORNBRANCH", + "comments": "

HEAD refers to branch with no commits

\n", + "value": -9 + }, + { + "type": "int", + "name": "GIT_EUNMERGED", + "comments": "

Merge in progress prevented operation

\n", + "value": -10 + }, + { + "type": "int", + "name": "GIT_ENONFASTFORWARD", + "comments": "

Reference was not fast-forwardable

\n", + "value": -11 + }, + { + "type": "int", + "name": "GIT_EINVALIDSPEC", + "comments": "

Name/ref spec was not in a valid format

\n", + "value": -12 + }, + { + "type": "int", + "name": "GIT_ECONFLICT", + "comments": "

Checkout conflicts prevented operation

\n", + "value": -13 + }, + { + "type": "int", + "name": "GIT_ELOCKED", + "comments": "

Lock file prevented operation

\n", + "value": -14 + }, + { + "type": "int", + "name": "GIT_EMODIFIED", + "comments": "

Reference value does not match expected

\n", + "value": -15 + }, + { + "type": "int", + "name": "GIT_EAUTH", + "comments": "

Authentication error

\n", + "value": -16 + }, + { + "type": "int", + "name": "GIT_ECERTIFICATE", + "comments": "

Server certificate is invalid

\n", + "value": -17 + }, + { + "type": "int", + "name": "GIT_EAPPLIED", + "comments": "

Patch/merge has already been applied

\n", + "value": -18 + }, + { + "type": "int", + "name": "GIT_EPEEL", + "comments": "

The requested peel operation is not possible

\n", + "value": -19 + }, + { + "type": "int", + "name": "GIT_EEOF", + "comments": "

Unexpected EOF

\n", + "value": -20 + }, + { + "type": "int", + "name": "GIT_EINVALID", + "comments": "

Invalid operation or input

\n", + "value": -21 + }, + { + "type": "int", + "name": "GIT_EUNCOMMITTED", + "comments": "

Uncommitted changes in index prevented operation

\n", + "value": -22 + }, + { + "type": "int", + "name": "GIT_EDIRECTORY", + "comments": "

The operation is not valid for a directory

\n", + "value": -23 + }, + { + "type": "int", + "name": "GIT_EMERGECONFLICT", + "comments": "

A merge conflict exists and cannot continue

\n", + "value": -24 + }, + { + "type": "int", + "name": "GIT_PASSTHROUGH", + "comments": "

Internal only

\n", + "value": -30 + }, + { + "type": "int", + "name": "GIT_ITEROVER", + "comments": "

Signals end of iteration with iterator

\n", + "value": -31 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_error_t", + { + "decl": [ + "GITERR_NONE", + "GITERR_NOMEMORY", + "GITERR_OS", + "GITERR_INVALID", + "GITERR_REFERENCE", + "GITERR_ZLIB", + "GITERR_REPOSITORY", + "GITERR_CONFIG", + "GITERR_REGEX", + "GITERR_ODB", + "GITERR_INDEX", + "GITERR_OBJECT", + "GITERR_NET", + "GITERR_TAG", + "GITERR_TREE", + "GITERR_INDEXER", + "GITERR_SSL", + "GITERR_SUBMODULE", + "GITERR_THREAD", + "GITERR_STASH", + "GITERR_CHECKOUT", + "GITERR_FETCHHEAD", + "GITERR_MERGE", + "GITERR_SSH", + "GITERR_FILTER", + "GITERR_REVERT", + "GITERR_CALLBACK", + "GITERR_CHERRYPICK", + "GITERR_DESCRIBE", + "GITERR_REBASE", + "GITERR_FILESYSTEM" + ], + "type": "enum", + "file": "errors.h", + "line": 70, + "lineto": 102, + "block": "GITERR_NONE\nGITERR_NOMEMORY\nGITERR_OS\nGITERR_INVALID\nGITERR_REFERENCE\nGITERR_ZLIB\nGITERR_REPOSITORY\nGITERR_CONFIG\nGITERR_REGEX\nGITERR_ODB\nGITERR_INDEX\nGITERR_OBJECT\nGITERR_NET\nGITERR_TAG\nGITERR_TREE\nGITERR_INDEXER\nGITERR_SSL\nGITERR_SUBMODULE\nGITERR_THREAD\nGITERR_STASH\nGITERR_CHECKOUT\nGITERR_FETCHHEAD\nGITERR_MERGE\nGITERR_SSH\nGITERR_FILTER\nGITERR_REVERT\nGITERR_CALLBACK\nGITERR_CHERRYPICK\nGITERR_DESCRIBE\nGITERR_REBASE\nGITERR_FILESYSTEM", + "tdef": "typedef", + "description": " Error classes ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GITERR_NONE", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GITERR_NOMEMORY", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GITERR_OS", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GITERR_INVALID", + "comments": "", + "value": 3 + }, + { + "type": "int", + "name": "GITERR_REFERENCE", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GITERR_ZLIB", + "comments": "", + "value": 5 + }, + { + "type": "int", + "name": "GITERR_REPOSITORY", + "comments": "", + "value": 6 + }, + { + "type": "int", + "name": "GITERR_CONFIG", + 
"comments": "", + "value": 7 + }, + { + "type": "int", + "name": "GITERR_REGEX", + "comments": "", + "value": 8 + }, + { + "type": "int", + "name": "GITERR_ODB", + "comments": "", + "value": 9 + }, + { + "type": "int", + "name": "GITERR_INDEX", + "comments": "", + "value": 10 + }, + { + "type": "int", + "name": "GITERR_OBJECT", + "comments": "", + "value": 11 + }, + { + "type": "int", + "name": "GITERR_NET", + "comments": "", + "value": 12 + }, + { + "type": "int", + "name": "GITERR_TAG", + "comments": "", + "value": 13 + }, + { + "type": "int", + "name": "GITERR_TREE", + "comments": "", + "value": 14 + }, + { + "type": "int", + "name": "GITERR_INDEXER", + "comments": "", + "value": 15 + }, + { + "type": "int", + "name": "GITERR_SSL", + "comments": "", + "value": 16 + }, + { + "type": "int", + "name": "GITERR_SUBMODULE", + "comments": "", + "value": 17 + }, + { + "type": "int", + "name": "GITERR_THREAD", + "comments": "", + "value": 18 + }, + { + "type": "int", + "name": "GITERR_STASH", + "comments": "", + "value": 19 + }, + { + "type": "int", + "name": "GITERR_CHECKOUT", + "comments": "", + "value": 20 + }, + { + "type": "int", + "name": "GITERR_FETCHHEAD", + "comments": "", + "value": 21 + }, + { + "type": "int", + "name": "GITERR_MERGE", + "comments": "", + "value": 22 + }, + { + "type": "int", + "name": "GITERR_SSH", + "comments": "", + "value": 23 + }, + { + "type": "int", + "name": "GITERR_FILTER", + "comments": "", + "value": 24 + }, + { + "type": "int", + "name": "GITERR_REVERT", + "comments": "", + "value": 25 + }, + { + "type": "int", + "name": "GITERR_CALLBACK", + "comments": "", + "value": 26 + }, + { + "type": "int", + "name": "GITERR_CHERRYPICK", + "comments": "", + "value": 27 + }, + { + "type": "int", + "name": "GITERR_DESCRIBE", + "comments": "", + "value": 28 + }, + { + "type": "int", + "name": "GITERR_REBASE", + "comments": "", + "value": 29 + }, + { + "type": "int", + "name": "GITERR_FILESYSTEM", + "comments": "", + "value": 30 + } + ], + 
"used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_feature_t", + { + "decl": [ + "GIT_FEATURE_THREADS", + "GIT_FEATURE_HTTPS", + "GIT_FEATURE_SSH", + "GIT_FEATURE_NSEC" + ], + "type": "enum", + "file": "common.h", + "line": 111, + "lineto": 116, + "block": "GIT_FEATURE_THREADS\nGIT_FEATURE_HTTPS\nGIT_FEATURE_SSH\nGIT_FEATURE_NSEC", + "tdef": "typedef", + "description": " Combinations of these values describe the features with which libgit2\n was compiled", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_FEATURE_THREADS", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_FEATURE_HTTPS", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_FEATURE_SSH", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_FEATURE_NSEC", + "comments": "", + "value": 8 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_fetch_options", + { + "decl": [ + "int version", + "git_remote_callbacks callbacks", + "git_fetch_prune_t prune", + "int update_fetchhead", + "git_remote_autotag_option_t download_tags", + "git_proxy_options proxy_opts", + "git_strarray custom_headers" + ], + "type": "struct", + "value": "git_fetch_options", + "file": "remote.h", + "line": 525, + "lineto": 562, + "block": "int version\ngit_remote_callbacks callbacks\ngit_fetch_prune_t prune\nint update_fetchhead\ngit_remote_autotag_option_t download_tags\ngit_proxy_options proxy_opts\ngit_strarray custom_headers", + "tdef": "typedef", + "description": " Fetch options structure.", + "comments": "

Zero out for defaults. Initialize with GIT_FETCH_OPTIONS_INIT macro to correctly set the version field. E.g.

\n\n
    git_fetch_options opts = GIT_FETCH_OPTIONS_INIT;\n
\n", + "fields": [ + { + "type": "int", + "name": "version", + "comments": "" + }, + { + "type": "git_remote_callbacks", + "name": "callbacks", + "comments": " Callbacks to use for this fetch operation" + }, + { + "type": "git_fetch_prune_t", + "name": "prune", + "comments": " Whether to perform a prune after the fetch" + }, + { + "type": "int", + "name": "update_fetchhead", + "comments": " Whether to write the results to FETCH_HEAD. Defaults to\n on. Leave this default in order to behave like git." + }, + { + "type": "git_remote_autotag_option_t", + "name": "download_tags", + "comments": " Determines how to behave regarding tags on the remote, such\n as auto-downloading tags for objects we're downloading or\n downloading all of them.\n\n The default is to auto-follow tags." + }, + { + "type": "git_proxy_options", + "name": "proxy_opts", + "comments": " Proxy options to use, by default no proxy is used." + }, + { + "type": "git_strarray", + "name": "custom_headers", + "comments": " Extra headers for this fetch operation" + } + ], + "used": { + "returns": [], + "needs": [ + "git_fetch_init_options", + "git_remote_download", + "git_remote_fetch" + ] + } + } + ], + [ + "git_filemode_t", + { + "decl": [ + "GIT_FILEMODE_UNREADABLE", + "GIT_FILEMODE_TREE", + "GIT_FILEMODE_BLOB", + "GIT_FILEMODE_BLOB_EXECUTABLE", + "GIT_FILEMODE_LINK", + "GIT_FILEMODE_COMMIT" + ], + "type": "enum", + "file": "types.h", + "line": 205, + "lineto": 212, + "block": "GIT_FILEMODE_UNREADABLE\nGIT_FILEMODE_TREE\nGIT_FILEMODE_BLOB\nGIT_FILEMODE_BLOB_EXECUTABLE\nGIT_FILEMODE_LINK\nGIT_FILEMODE_COMMIT", + "tdef": "typedef", + "description": " Valid modes for index and tree entries. 
", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_FILEMODE_UNREADABLE", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_FILEMODE_TREE", + "comments": "", + "value": 16384 + }, + { + "type": "int", + "name": "GIT_FILEMODE_BLOB", + "comments": "", + "value": 33188 + }, + { + "type": "int", + "name": "GIT_FILEMODE_BLOB_EXECUTABLE", + "comments": "", + "value": 33261 + }, + { + "type": "int", + "name": "GIT_FILEMODE_LINK", + "comments": "", + "value": 40960 + }, + { + "type": "int", + "name": "GIT_FILEMODE_COMMIT", + "comments": "", + "value": 57344 + } + ], + "used": { + "returns": [ + "git_tree_entry_filemode", + "git_tree_entry_filemode_raw" + ], + "needs": [ + "git_treebuilder_insert" + ] + } + } + ], + [ + "git_filter", + { + "decl": [ + "unsigned int version", + "const char * attributes", + "git_filter_init_fn initialize", + "git_filter_shutdown_fn shutdown", + "git_filter_check_fn check", + "git_filter_apply_fn apply", + "git_filter_stream_fn stream", + "git_filter_cleanup_fn cleanup" + ], + "type": "struct", + "value": "git_filter", + "file": "sys/filter.h", + "line": 226, + "lineto": 271, + "tdef": null, + "description": " Filter structure used to register custom filters.", + "comments": "

To associate extra data with a filter, allocate extra data and put the git_filter struct at the start of your data buffer, then cast the self pointer to your larger structure when your callback is invoked.

\n", + "block": "unsigned int version\nconst char * attributes\ngit_filter_init_fn initialize\ngit_filter_shutdown_fn shutdown\ngit_filter_check_fn check\ngit_filter_apply_fn apply\ngit_filter_stream_fn stream\ngit_filter_cleanup_fn cleanup", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": " The `version` field should be set to `GIT_FILTER_VERSION`. " + }, + { + "type": "const char *", + "name": "attributes", + "comments": " A whitespace-separated list of attribute names to check for this\n filter (e.g. \"eol crlf text\"). If the attribute name is bare, it\n will be simply loaded and passed to the `check` callback. If it\n has a value (i.e. \"name=value\"), the attribute must match that\n value for the filter to be applied. The value may be a wildcard\n (eg, \"name=*\"), in which case the filter will be invoked for any\n value for the given attribute name. See the attribute parameter\n of the `check` callback for the attribute value that was specified." + }, + { + "type": "git_filter_init_fn", + "name": "initialize", + "comments": " Called when the filter is first used for any file. " + }, + { + "type": "git_filter_shutdown_fn", + "name": "shutdown", + "comments": " Called when the filter is removed or unregistered from the system. " + }, + { + "type": "git_filter_check_fn", + "name": "check", + "comments": " Called to determine whether the filter should be invoked for a\n given file. If this function returns `GIT_PASSTHROUGH` then the\n `apply` function will not be invoked and the contents will be passed\n through unmodified." + }, + { + "type": "git_filter_apply_fn", + "name": "apply", + "comments": " Called to actually apply the filter to file contents. If this\n function returns `GIT_PASSTHROUGH` then the contents will be passed\n through unmodified." + }, + { + "type": "git_filter_stream_fn", + "name": "stream", + "comments": " Called to apply the filter in a streaming manner. 
If this is not\n specified then the system will call `apply` with the whole buffer." + }, + { + "type": "git_filter_cleanup_fn", + "name": "cleanup", + "comments": " Called when the system is done filtering for a file. " + } + ], + "used": { + "returns": [ + "git_filter_lookup", + "git_filter_source_mode" + ], + "needs": [ + "git_filter_apply_fn", + "git_filter_check_fn", + "git_filter_cleanup_fn", + "git_filter_init_fn", + "git_filter_list_apply_to_blob", + "git_filter_list_apply_to_data", + "git_filter_list_apply_to_file", + "git_filter_list_contains", + "git_filter_list_free", + "git_filter_list_load", + "git_filter_list_new", + "git_filter_list_push", + "git_filter_list_stream_blob", + "git_filter_list_stream_data", + "git_filter_list_stream_file", + "git_filter_register", + "git_filter_shutdown_fn", + "git_filter_source_filemode", + "git_filter_source_flags", + "git_filter_source_id", + "git_filter_source_mode", + "git_filter_source_path", + "git_filter_source_repo" + ] + } + } + ], + [ + "git_filter_flag_t", + { + "decl": [ + "GIT_FILTER_DEFAULT", + "GIT_FILTER_ALLOW_UNSAFE" + ], + "type": "enum", + "file": "filter.h", + "line": 41, + "lineto": 44, + "block": "GIT_FILTER_DEFAULT\nGIT_FILTER_ALLOW_UNSAFE", + "tdef": "typedef", + "description": " Filter option flags.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_FILTER_DEFAULT", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_FILTER_ALLOW_UNSAFE", + "comments": "", + "value": 1 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_filter_list", + { + "decl": "git_filter_list", + "type": "struct", + "value": "git_filter_list", + "file": "filter.h", + "line": 73, + "lineto": 73, + "tdef": "typedef", + "description": " List of filters to be applied", + "comments": "

This represents a list of filters to be applied to a file / blob. You can build the list with one call, apply it with another, and dispose it with a third. In typical usage, there are not many occasions where a git_filter_list is needed directly since the library will generally handle conversions for you, but it can be convenient to be able to build and apply the list sometimes.

\n", + "used": { + "returns": [], + "needs": [ + "git_filter_list_apply_to_blob", + "git_filter_list_apply_to_data", + "git_filter_list_apply_to_file", + "git_filter_list_contains", + "git_filter_list_free", + "git_filter_list_load", + "git_filter_list_new", + "git_filter_list_push", + "git_filter_list_stream_blob", + "git_filter_list_stream_data", + "git_filter_list_stream_file" + ] + } + } + ], + [ + "git_filter_mode_t", + { + "decl": [ + "GIT_FILTER_TO_WORKTREE", + "GIT_FILTER_SMUDGE", + "GIT_FILTER_TO_ODB", + "GIT_FILTER_CLEAN" + ], + "type": "enum", + "file": "filter.h", + "line": 31, + "lineto": 36, + "block": "GIT_FILTER_TO_WORKTREE\nGIT_FILTER_SMUDGE\nGIT_FILTER_TO_ODB\nGIT_FILTER_CLEAN", + "tdef": "typedef", + "description": " Filters are applied in one of two directions: smudging - which is\n exporting a file from the Git object database to the working directory,\n and cleaning - which is importing a file from the working directory to\n the Git object database. These values control which direction of\n change is being applied.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_FILTER_TO_WORKTREE", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_FILTER_SMUDGE", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_FILTER_TO_ODB", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_FILTER_CLEAN", + "comments": "", + "value": 1 + } + ], + "used": { + "returns": [ + "git_filter_source_mode" + ], + "needs": [ + "git_filter_list_load", + "git_filter_list_new" + ] + } + } + ], + [ + "git_filter_source", + { + "decl": "git_filter_source", + "type": "struct", + "value": "git_filter_source", + "file": "sys/filter.h", + "line": 95, + "lineto": 95, + "tdef": "typedef", + "description": " A filter source represents a file/blob to be processed", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_filter_apply_fn", + "git_filter_check_fn", + "git_filter_source_filemode", + 
"git_filter_source_flags", + "git_filter_source_id", + "git_filter_source_mode", + "git_filter_source_path", + "git_filter_source_repo" + ] + } + } + ], + [ + "git_hashsig", + { + "decl": "git_hashsig", + "type": "struct", + "value": "git_hashsig", + "file": "sys/hashsig.h", + "line": 17, + "lineto": 17, + "tdef": "typedef", + "description": " Similarity signature of arbitrary text content based on line hashes", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_hashsig_compare", + "git_hashsig_create_fromfile", + "git_hashsig_free" + ] + } + } + ], + [ + "git_hashsig_option_t", + { + "decl": [ + "GIT_HASHSIG_NORMAL", + "GIT_HASHSIG_IGNORE_WHITESPACE", + "GIT_HASHSIG_SMART_WHITESPACE", + "GIT_HASHSIG_ALLOW_SMALL_FILES" + ], + "type": "enum", + "file": "sys/hashsig.h", + "line": 25, + "lineto": 45, + "block": "GIT_HASHSIG_NORMAL\nGIT_HASHSIG_IGNORE_WHITESPACE\nGIT_HASHSIG_SMART_WHITESPACE\nGIT_HASHSIG_ALLOW_SMALL_FILES", + "tdef": "typedef", + "description": " Options for hashsig computation", + "comments": "

The options GIT_HASHSIG_NORMAL, GIT_HASHSIG_IGNORE_WHITESPACE, GIT_HASHSIG_SMART_WHITESPACE are exclusive and should not be combined.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_HASHSIG_NORMAL", + "comments": "

Use all data

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_HASHSIG_IGNORE_WHITESPACE", + "comments": "

Ignore whitespace

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_HASHSIG_SMART_WHITESPACE", + "comments": "

Ignore

\n\n

and all space after

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_HASHSIG_ALLOW_SMALL_FILES", + "comments": "

Allow hashing of small files

\n", + "value": 4 + } + ], + "used": { + "returns": [], + "needs": [ + "git_hashsig_create_fromfile" + ] + } + } + ], + [ + "git_idxentry_extended_flag_t", + { + "decl": [ + "GIT_IDXENTRY_INTENT_TO_ADD", + "GIT_IDXENTRY_SKIP_WORKTREE", + "GIT_IDXENTRY_EXTENDED2", + "GIT_IDXENTRY_EXTENDED_FLAGS", + "GIT_IDXENTRY_UPDATE", + "GIT_IDXENTRY_REMOVE", + "GIT_IDXENTRY_UPTODATE", + "GIT_IDXENTRY_ADDED", + "GIT_IDXENTRY_HASHED", + "GIT_IDXENTRY_UNHASHED", + "GIT_IDXENTRY_WT_REMOVE", + "GIT_IDXENTRY_CONFLICTED", + "GIT_IDXENTRY_UNPACKED", + "GIT_IDXENTRY_NEW_SKIP_WORKTREE" + ], + "type": "enum", + "file": "index.h", + "line": 115, + "lineto": 135, + "block": "GIT_IDXENTRY_INTENT_TO_ADD\nGIT_IDXENTRY_SKIP_WORKTREE\nGIT_IDXENTRY_EXTENDED2\nGIT_IDXENTRY_EXTENDED_FLAGS\nGIT_IDXENTRY_UPDATE\nGIT_IDXENTRY_REMOVE\nGIT_IDXENTRY_UPTODATE\nGIT_IDXENTRY_ADDED\nGIT_IDXENTRY_HASHED\nGIT_IDXENTRY_UNHASHED\nGIT_IDXENTRY_WT_REMOVE\nGIT_IDXENTRY_CONFLICTED\nGIT_IDXENTRY_UNPACKED\nGIT_IDXENTRY_NEW_SKIP_WORKTREE", + "tdef": "typedef", + "description": " Bitmasks for on-disk fields of `git_index_entry`'s `flags_extended`", + "comments": "

In memory, the flags_extended fields are divided into two parts: the fields that are read from and written to disk, and other fields that in-memory only and used by libgit2. Only the flags in GIT_IDXENTRY_EXTENDED_FLAGS will get saved on-disk.

\n\n

Thee first three bitmasks match the three fields in the git_index_entry flags_extended value that belong on disk. You can use them to interpret the data in the flags_extended.

\n\n

The rest of the bitmasks match the other fields in the git_index_entry flags_extended value that are only used in-memory by libgit2. You can use them to interpret the data in the flags_extended.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_IDXENTRY_INTENT_TO_ADD", + "comments": "", + "value": 8192 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_SKIP_WORKTREE", + "comments": "", + "value": 16384 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_EXTENDED2", + "comments": "

Reserved for future extension

\n", + "value": 32768 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_EXTENDED_FLAGS", + "comments": "

Reserved for future extension

\n", + "value": 24576 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_UPDATE", + "comments": "

Reserved for future extension

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_REMOVE", + "comments": "

Reserved for future extension

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_UPTODATE", + "comments": "

Reserved for future extension

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_ADDED", + "comments": "

Reserved for future extension

\n", + "value": 8 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_HASHED", + "comments": "

Reserved for future extension

\n", + "value": 16 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_UNHASHED", + "comments": "

Reserved for future extension

\n", + "value": 32 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_WT_REMOVE", + "comments": "

remove in work directory

\n", + "value": 64 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_CONFLICTED", + "comments": "", + "value": 128 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_UNPACKED", + "comments": "", + "value": 256 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_NEW_SKIP_WORKTREE", + "comments": "", + "value": 512 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_index", + { + "decl": "git_index", + "type": "struct", + "value": "git_index", + "file": "types.h", + "line": 132, + "lineto": 132, + "tdef": "typedef", + "description": " Memory representation of an index file. ", + "comments": "", + "used": { + "returns": [ + "git_index_get_byindex", + "git_index_get_bypath" + ], + "needs": [ + "git_checkout_index", + "git_cherrypick_commit", + "git_diff_index_to_index", + "git_diff_index_to_workdir", + "git_diff_tree_to_index", + "git_index_add", + "git_index_add_all", + "git_index_add_bypath", + "git_index_add_frombuffer", + "git_index_caps", + "git_index_checksum", + "git_index_clear", + "git_index_conflict_add", + "git_index_conflict_cleanup", + "git_index_conflict_get", + "git_index_conflict_iterator_free", + "git_index_conflict_iterator_new", + "git_index_conflict_next", + "git_index_conflict_remove", + "git_index_entry_is_conflict", + "git_index_entry_stage", + "git_index_entrycount", + "git_index_find", + "git_index_find_prefix", + "git_index_free", + "git_index_get_byindex", + "git_index_get_bypath", + "git_index_has_conflicts", + "git_index_new", + "git_index_open", + "git_index_owner", + "git_index_path", + "git_index_read", + "git_index_read_tree", + "git_index_remove", + "git_index_remove_all", + "git_index_remove_bypath", + "git_index_remove_directory", + "git_index_set_caps", + "git_index_update_all", + "git_index_write", + "git_index_write_tree", + "git_index_write_tree_to", + "git_indexer_append", + "git_indexer_commit", + "git_indexer_free", + "git_indexer_hash", + "git_indexer_new", + "git_merge_commits", + 
"git_merge_file_from_index", + "git_merge_trees", + "git_pathspec_match_index", + "git_rebase_inmemory_index", + "git_repository_index", + "git_repository_set_index", + "git_revert_commit" + ] + } + } + ], + [ + "git_index_add_option_t", + { + "decl": [ + "GIT_INDEX_ADD_DEFAULT", + "GIT_INDEX_ADD_FORCE", + "GIT_INDEX_ADD_DISABLE_PATHSPEC_MATCH", + "GIT_INDEX_ADD_CHECK_PATHSPEC" + ], + "type": "enum", + "file": "index.h", + "line": 150, + "lineto": 155, + "block": "GIT_INDEX_ADD_DEFAULT\nGIT_INDEX_ADD_FORCE\nGIT_INDEX_ADD_DISABLE_PATHSPEC_MATCH\nGIT_INDEX_ADD_CHECK_PATHSPEC", + "tdef": "typedef", + "description": " Flags for APIs that add files matching pathspec ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_INDEX_ADD_DEFAULT", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_INDEX_ADD_FORCE", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_INDEX_ADD_DISABLE_PATHSPEC_MATCH", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_INDEX_ADD_CHECK_PATHSPEC", + "comments": "", + "value": 4 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_index_conflict_iterator", + { + "decl": "git_index_conflict_iterator", + "type": "struct", + "value": "git_index_conflict_iterator", + "file": "types.h", + "line": 135, + "lineto": 135, + "tdef": "typedef", + "description": " An iterator for conflicts in the index. 
", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_index_conflict_iterator_free", + "git_index_conflict_iterator_new", + "git_index_conflict_next" + ] + } + } + ], + [ + "git_index_entry", + { + "decl": [ + "git_index_time ctime", + "git_index_time mtime", + "uint32_t dev", + "uint32_t ino", + "uint32_t mode", + "uint32_t uid", + "uint32_t gid", + "uint32_t file_size", + "git_oid id", + "uint16_t flags", + "uint16_t flags_extended", + "const char * path" + ], + "type": "struct", + "value": "git_index_entry", + "file": "index.h", + "line": 53, + "lineto": 70, + "block": "git_index_time ctime\ngit_index_time mtime\nuint32_t dev\nuint32_t ino\nuint32_t mode\nuint32_t uid\nuint32_t gid\nuint32_t file_size\ngit_oid id\nuint16_t flags\nuint16_t flags_extended\nconst char * path", + "tdef": "typedef", + "description": " In-memory representation of a file entry in the index.", + "comments": "

This is a public structure that represents a file entry in the index. The meaning of the fields corresponds to core Git's documentation (in "Documentation/technical/index-format.txt").

\n\n

The flags field consists of a number of bit fields which can be accessed via the first set of GIT_IDXENTRY_... bitmasks below. These flags are all read from and persisted to disk.

\n\n

The flags_extended field also has a number of bit fields which can be accessed via the later GIT_IDXENTRY_... bitmasks below. Some of these flags are read from and written to disk, but some are set aside for in-memory only reference.

\n\n

Note that the time and size fields are truncated to 32 bits. This is enough to detect changes, which is enough for the index to function as a cache, but it should not be taken as an authoritative source for that data.

\n", + "fields": [ + { + "type": "git_index_time", + "name": "ctime", + "comments": "" + }, + { + "type": "git_index_time", + "name": "mtime", + "comments": "" + }, + { + "type": "uint32_t", + "name": "dev", + "comments": "" + }, + { + "type": "uint32_t", + "name": "ino", + "comments": "" + }, + { + "type": "uint32_t", + "name": "mode", + "comments": "" + }, + { + "type": "uint32_t", + "name": "uid", + "comments": "" + }, + { + "type": "uint32_t", + "name": "gid", + "comments": "" + }, + { + "type": "uint32_t", + "name": "file_size", + "comments": "" + }, + { + "type": "git_oid", + "name": "id", + "comments": "" + }, + { + "type": "uint16_t", + "name": "flags", + "comments": "" + }, + { + "type": "uint16_t", + "name": "flags_extended", + "comments": "" + }, + { + "type": "const char *", + "name": "path", + "comments": "" + } + ], + "used": { + "returns": [ + "git_index_get_byindex", + "git_index_get_bypath" + ], + "needs": [ + "git_index_add", + "git_index_add_frombuffer", + "git_index_conflict_add", + "git_index_conflict_get", + "git_index_conflict_next", + "git_index_entry_is_conflict", + "git_index_entry_stage", + "git_merge_file_from_index" + ] + } + } + ], + [ + "git_index_time", + { + "decl": [ + "int32_t seconds", + "uint32_t nanoseconds" + ], + "type": "struct", + "value": "git_index_time", + "file": "index.h", + "line": 26, + "lineto": 30, + "block": "int32_t seconds\nuint32_t nanoseconds", + "tdef": "typedef", + "description": " Time structure used in a git index entry ", + "comments": "", + "fields": [ + { + "type": "int32_t", + "name": "seconds", + "comments": "" + }, + { + "type": "uint32_t", + "name": "nanoseconds", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_indexcap_t", + { + "decl": [ + "GIT_INDEXCAP_IGNORE_CASE", + "GIT_INDEXCAP_NO_FILEMODE", + "GIT_INDEXCAP_NO_SYMLINKS", + "GIT_INDEXCAP_FROM_OWNER" + ], + "type": "enum", + "file": "index.h", + "line": 138, + "lineto": 143, + "block": 
"GIT_INDEXCAP_IGNORE_CASE\nGIT_INDEXCAP_NO_FILEMODE\nGIT_INDEXCAP_NO_SYMLINKS\nGIT_INDEXCAP_FROM_OWNER", + "tdef": "typedef", + "description": " Capabilities of system that affect index actions. ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_INDEXCAP_IGNORE_CASE", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_INDEXCAP_NO_FILEMODE", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_INDEXCAP_NO_SYMLINKS", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_INDEXCAP_FROM_OWNER", + "comments": "", + "value": -1 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_indxentry_flag_t", + { + "decl": [ + "GIT_IDXENTRY_EXTENDED", + "GIT_IDXENTRY_VALID" + ], + "type": "enum", + "file": "index.h", + "line": 86, + "lineto": 89, + "block": "GIT_IDXENTRY_EXTENDED\nGIT_IDXENTRY_VALID", + "tdef": "typedef", + "description": " Flags for index entries", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_IDXENTRY_EXTENDED", + "comments": "", + "value": 16384 + }, + { + "type": "int", + "name": "GIT_IDXENTRY_VALID", + "comments": "", + "value": 32768 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_libgit2_opt_t", + { + "decl": [ + "GIT_OPT_GET_MWINDOW_SIZE", + "GIT_OPT_SET_MWINDOW_SIZE", + "GIT_OPT_GET_MWINDOW_MAPPED_LIMIT", + "GIT_OPT_SET_MWINDOW_MAPPED_LIMIT", + "GIT_OPT_GET_SEARCH_PATH", + "GIT_OPT_SET_SEARCH_PATH", + "GIT_OPT_SET_CACHE_OBJECT_LIMIT", + "GIT_OPT_SET_CACHE_MAX_SIZE", + "GIT_OPT_ENABLE_CACHING", + "GIT_OPT_GET_CACHED_MEMORY", + "GIT_OPT_GET_TEMPLATE_PATH", + "GIT_OPT_SET_TEMPLATE_PATH", + "GIT_OPT_SET_SSL_CERT_LOCATIONS", + "GIT_OPT_SET_USER_AGENT", + "GIT_OPT_ENABLE_STRICT_OBJECT_CREATION", + "GIT_OPT_SET_SSL_CIPHERS" + ], + "type": "enum", + "file": "common.h", + "line": 144, + "lineto": 161, + "block": 
"GIT_OPT_GET_MWINDOW_SIZE\nGIT_OPT_SET_MWINDOW_SIZE\nGIT_OPT_GET_MWINDOW_MAPPED_LIMIT\nGIT_OPT_SET_MWINDOW_MAPPED_LIMIT\nGIT_OPT_GET_SEARCH_PATH\nGIT_OPT_SET_SEARCH_PATH\nGIT_OPT_SET_CACHE_OBJECT_LIMIT\nGIT_OPT_SET_CACHE_MAX_SIZE\nGIT_OPT_ENABLE_CACHING\nGIT_OPT_GET_CACHED_MEMORY\nGIT_OPT_GET_TEMPLATE_PATH\nGIT_OPT_SET_TEMPLATE_PATH\nGIT_OPT_SET_SSL_CERT_LOCATIONS\nGIT_OPT_SET_USER_AGENT\nGIT_OPT_ENABLE_STRICT_OBJECT_CREATION\nGIT_OPT_SET_SSL_CIPHERS", + "tdef": "typedef", + "description": " Global library options", + "comments": "

These are used to select which global option to set or get and are used in git_libgit2_opts().

\n", + "fields": [ + { + "type": "int", + "name": "GIT_OPT_GET_MWINDOW_SIZE", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_OPT_SET_MWINDOW_SIZE", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_OPT_GET_MWINDOW_MAPPED_LIMIT", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_OPT_SET_MWINDOW_MAPPED_LIMIT", + "comments": "", + "value": 3 + }, + { + "type": "int", + "name": "GIT_OPT_GET_SEARCH_PATH", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_OPT_SET_SEARCH_PATH", + "comments": "", + "value": 5 + }, + { + "type": "int", + "name": "GIT_OPT_SET_CACHE_OBJECT_LIMIT", + "comments": "", + "value": 6 + }, + { + "type": "int", + "name": "GIT_OPT_SET_CACHE_MAX_SIZE", + "comments": "", + "value": 7 + }, + { + "type": "int", + "name": "GIT_OPT_ENABLE_CACHING", + "comments": "", + "value": 8 + }, + { + "type": "int", + "name": "GIT_OPT_GET_CACHED_MEMORY", + "comments": "", + "value": 9 + }, + { + "type": "int", + "name": "GIT_OPT_GET_TEMPLATE_PATH", + "comments": "", + "value": 10 + }, + { + "type": "int", + "name": "GIT_OPT_SET_TEMPLATE_PATH", + "comments": "", + "value": 11 + }, + { + "type": "int", + "name": "GIT_OPT_SET_SSL_CERT_LOCATIONS", + "comments": "", + "value": 12 + }, + { + "type": "int", + "name": "GIT_OPT_SET_USER_AGENT", + "comments": "", + "value": 13 + }, + { + "type": "int", + "name": "GIT_OPT_ENABLE_STRICT_OBJECT_CREATION", + "comments": "", + "value": 14 + }, + { + "type": "int", + "name": "GIT_OPT_SET_SSL_CIPHERS", + "comments": "", + "value": 15 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_merge_analysis_t", + { + "decl": [ + "GIT_MERGE_ANALYSIS_NONE", + "GIT_MERGE_ANALYSIS_NORMAL", + "GIT_MERGE_ANALYSIS_UP_TO_DATE", + "GIT_MERGE_ANALYSIS_FASTFORWARD", + "GIT_MERGE_ANALYSIS_UNBORN" + ], + "type": "enum", + "file": "merge.h", + "line": 311, + "lineto": 340, + "block": 
"GIT_MERGE_ANALYSIS_NONE\nGIT_MERGE_ANALYSIS_NORMAL\nGIT_MERGE_ANALYSIS_UP_TO_DATE\nGIT_MERGE_ANALYSIS_FASTFORWARD\nGIT_MERGE_ANALYSIS_UNBORN", + "tdef": "typedef", + "description": " The results of `git_merge_analysis` indicate the merge opportunities.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_MERGE_ANALYSIS_NONE", + "comments": "

No merge is possible. (Unused.)

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_MERGE_ANALYSIS_NORMAL", + "comments": "

A "normal" merge; both HEAD and the given merge input have diverged\n from their common ancestor. The divergent commits must be merged.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_MERGE_ANALYSIS_UP_TO_DATE", + "comments": "

All given merge inputs are reachable from HEAD, meaning the\n repository is up-to-date and no merge needs to be performed.

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_MERGE_ANALYSIS_FASTFORWARD", + "comments": "

The given merge input is a fast-forward from HEAD and no merge\n needs to be performed. Instead, the client can check out the\n given merge input.

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_MERGE_ANALYSIS_UNBORN", + "comments": "

The HEAD of the current repository is "unborn" and does not point to\n a valid commit. No merge can be performed, but the caller may wish\n to simply set HEAD to the target commit(s).

\n", + "value": 8 + } + ], + "used": { + "returns": [], + "needs": [ + "git_merge_analysis" + ] + } + } + ], + [ + "git_merge_driver", + { + "decl": [ + "unsigned int version", + "git_merge_driver_init_fn initialize", + "git_merge_driver_shutdown_fn shutdown", + "git_merge_driver_apply_fn apply" + ], + "type": "struct", + "value": "git_merge_driver", + "file": "sys/merge.h", + "line": 118, + "lineto": 135, + "block": "unsigned int version\ngit_merge_driver_init_fn initialize\ngit_merge_driver_shutdown_fn shutdown\ngit_merge_driver_apply_fn apply", + "tdef": null, + "description": " Merge driver structure used to register custom merge drivers.", + "comments": "

To associate extra data with a driver, allocate extra data and put the git_merge_driver struct at the start of your data buffer, then cast the self pointer to your larger structure when your callback is invoked.

\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": " The `version` should be set to `GIT_MERGE_DRIVER_VERSION`. " + }, + { + "type": "git_merge_driver_init_fn", + "name": "initialize", + "comments": " Called when the merge driver is first used for any file. " + }, + { + "type": "git_merge_driver_shutdown_fn", + "name": "shutdown", + "comments": " Called when the merge driver is unregistered from the system. " + }, + { + "type": "git_merge_driver_apply_fn", + "name": "apply", + "comments": " Called to merge the contents of a conflict. If this function\n returns `GIT_PASSTHROUGH` then the default (`text`) merge driver\n will instead be invoked. If this function returns\n `GIT_EMERGECONFLICT` then the file will remain conflicted." + } + ], + "used": { + "returns": [], + "needs": [ + "git_merge_driver_apply_fn" + ] + } + } + ], + [ + "git_merge_driver_source", + { + "decl": "git_merge_driver_source", + "type": "struct", + "value": "git_merge_driver_source", + "file": "sys/merge.h", + "line": 36, + "lineto": 36, + "tdef": "typedef", + "description": " A merge driver source represents the file to be merged", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_merge_driver_apply_fn" + ] + } + } + ], + [ + "git_merge_file_favor_t", + { + "decl": [ + "GIT_MERGE_FILE_FAVOR_NORMAL", + "GIT_MERGE_FILE_FAVOR_OURS", + "GIT_MERGE_FILE_FAVOR_THEIRS", + "GIT_MERGE_FILE_FAVOR_UNION" + ], + "type": "enum", + "file": "merge.h", + "line": 101, + "lineto": 131, + "block": "GIT_MERGE_FILE_FAVOR_NORMAL\nGIT_MERGE_FILE_FAVOR_OURS\nGIT_MERGE_FILE_FAVOR_THEIRS\nGIT_MERGE_FILE_FAVOR_UNION", + "tdef": "typedef", + "description": " Merge file favor options for `git_merge_options` instruct the file-level\n merging functionality how to deal with conflicting regions of the files.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_MERGE_FILE_FAVOR_NORMAL", + "comments": "

When a region of a file is changed in both branches, a conflict\n will be recorded in the index so that git_checkout can produce\n a merge file with conflict markers in the working directory.\n This is the default.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_FAVOR_OURS", + "comments": "

When a region of a file is changed in both branches, the file\n created in the index will contain the "ours" side of any conflicting\n region. The index will not record a conflict.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_FAVOR_THEIRS", + "comments": "

When a region of a file is changed in both branches, the file\n created in the index will contain the "theirs" side of any conflicting\n region. The index will not record a conflict.

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_FAVOR_UNION", + "comments": "

When a region of a file is changed in both branches, the file\n created in the index will contain each unique line from each side,\n which has the result of combining both files. The index will not\n record a conflict.

\n", + "value": 3 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_merge_file_flag_t", + { + "decl": [ + "GIT_MERGE_FILE_DEFAULT", + "GIT_MERGE_FILE_STYLE_MERGE", + "GIT_MERGE_FILE_STYLE_DIFF3", + "GIT_MERGE_FILE_SIMPLIFY_ALNUM", + "GIT_MERGE_FILE_IGNORE_WHITESPACE", + "GIT_MERGE_FILE_IGNORE_WHITESPACE_CHANGE", + "GIT_MERGE_FILE_IGNORE_WHITESPACE_EOL", + "GIT_MERGE_FILE_DIFF_PATIENCE", + "GIT_MERGE_FILE_DIFF_MINIMAL" + ], + "type": "enum", + "file": "merge.h", + "line": 136, + "lineto": 163, + "block": "GIT_MERGE_FILE_DEFAULT\nGIT_MERGE_FILE_STYLE_MERGE\nGIT_MERGE_FILE_STYLE_DIFF3\nGIT_MERGE_FILE_SIMPLIFY_ALNUM\nGIT_MERGE_FILE_IGNORE_WHITESPACE\nGIT_MERGE_FILE_IGNORE_WHITESPACE_CHANGE\nGIT_MERGE_FILE_IGNORE_WHITESPACE_EOL\nGIT_MERGE_FILE_DIFF_PATIENCE\nGIT_MERGE_FILE_DIFF_MINIMAL", + "tdef": "typedef", + "description": " File merging flags", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_MERGE_FILE_DEFAULT", + "comments": "

Defaults

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_STYLE_MERGE", + "comments": "

Create standard conflicted merge files

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_STYLE_DIFF3", + "comments": "

Create diff3-style files

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_SIMPLIFY_ALNUM", + "comments": "

Condense non-alphanumeric regions for simplified diff file

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_IGNORE_WHITESPACE", + "comments": "

Ignore all whitespace

\n", + "value": 8 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_IGNORE_WHITESPACE_CHANGE", + "comments": "

Ignore changes in amount of whitespace

\n", + "value": 16 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_IGNORE_WHITESPACE_EOL", + "comments": "

Ignore whitespace at end of line

\n", + "value": 32 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_DIFF_PATIENCE", + "comments": "

Use the "patience diff" algorithm

\n", + "value": 64 + }, + { + "type": "int", + "name": "GIT_MERGE_FILE_DIFF_MINIMAL", + "comments": "

Take extra time to find minimal diff

\n", + "value": 128 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_merge_file_input", + { + "decl": [ + "unsigned int version", + "const char * ptr", + "size_t size", + "const char * path", + "unsigned int mode" + ], + "type": "struct", + "value": "git_merge_file_input", + "file": "merge.h", + "line": 32, + "lineto": 46, + "block": "unsigned int version\nconst char * ptr\nsize_t size\nconst char * path\nunsigned int mode", + "tdef": "typedef", + "description": " The file inputs to `git_merge_file`. Callers should populate the\n `git_merge_file_input` structure with descriptions of the files in\n each side of the conflict for use in producing the merge file.", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "const char *", + "name": "ptr", + "comments": " Pointer to the contents of the file. " + }, + { + "type": "size_t", + "name": "size", + "comments": " Size of the contents pointed to in `ptr`. " + }, + { + "type": "const char *", + "name": "path", + "comments": " File name of the conflicted file, or `NULL` to not merge the path. " + }, + { + "type": "unsigned int", + "name": "mode", + "comments": " File mode of the conflicted file, or `0` to not merge the mode. 
" + } + ], + "used": { + "returns": [], + "needs": [ + "git_merge_file", + "git_merge_file_init_input" + ] + } + } + ], + [ + "git_merge_file_options", + { + "decl": [ + "unsigned int version", + "const char * ancestor_label", + "const char * our_label", + "const char * their_label", + "git_merge_file_favor_t favor", + "git_merge_file_flag_t flags" + ], + "type": "struct", + "value": "git_merge_file_options", + "file": "merge.h", + "line": 168, + "lineto": 194, + "block": "unsigned int version\nconst char * ancestor_label\nconst char * our_label\nconst char * their_label\ngit_merge_file_favor_t favor\ngit_merge_file_flag_t flags", + "tdef": "typedef", + "description": " Options for merging a file", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "const char *", + "name": "ancestor_label", + "comments": " Label for the ancestor file side of the conflict which will be prepended\n to labels in diff3-format merge files." + }, + { + "type": "const char *", + "name": "our_label", + "comments": " Label for our file side of the conflict which will be prepended\n to labels in merge files." + }, + { + "type": "const char *", + "name": "their_label", + "comments": " Label for their file side of the conflict which will be prepended\n to labels in merge files." + }, + { + "type": "git_merge_file_favor_t", + "name": "favor", + "comments": " The file to favor in region conflicts. 
" + }, + { + "type": "git_merge_file_flag_t", + "name": "flags", + "comments": " see `git_merge_file_flag_t` above " + } + ], + "used": { + "returns": [], + "needs": [ + "git_merge_file", + "git_merge_file_from_index", + "git_merge_file_init_options" + ] + } + } + ], + [ + "git_merge_file_result", + { + "decl": [ + "unsigned int automergeable", + "const char * path", + "unsigned int mode", + "const char * ptr", + "size_t len" + ], + "type": "struct", + "value": "git_merge_file_result", + "file": "merge.h", + "line": 215, + "lineto": 236, + "block": "unsigned int automergeable\nconst char * path\nunsigned int mode\nconst char * ptr\nsize_t len", + "tdef": "typedef", + "description": " Information about file-level merging", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "automergeable", + "comments": " True if the output was automerged, false if the output contains\n conflict markers." + }, + { + "type": "const char *", + "name": "path", + "comments": " The path that the resultant merge file should use, or NULL if a\n filename conflict would occur." + }, + { + "type": "unsigned int", + "name": "mode", + "comments": " The mode that the resultant merge file should use. " + }, + { + "type": "const char *", + "name": "ptr", + "comments": " The contents of the merge. " + }, + { + "type": "size_t", + "name": "len", + "comments": " The length of the merge contents. " + } + ], + "used": { + "returns": [], + "needs": [ + "git_merge_file", + "git_merge_file_from_index", + "git_merge_file_result_free" + ] + } + } + ], + [ + "git_merge_flag_t", + { + "decl": [ + "GIT_MERGE_FIND_RENAMES", + "GIT_MERGE_FAIL_ON_CONFLICT", + "GIT_MERGE_SKIP_REUC", + "GIT_MERGE_NO_RECURSIVE" + ], + "type": "enum", + "file": "merge.h", + "line": 68, + "lineto": 95, + "block": "GIT_MERGE_FIND_RENAMES\nGIT_MERGE_FAIL_ON_CONFLICT\nGIT_MERGE_SKIP_REUC\nGIT_MERGE_NO_RECURSIVE", + "tdef": "typedef", + "description": " Flags for `git_merge` options. 
A combination of these flags can be\n passed in via the `flags` value in the `git_merge_options`.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_MERGE_FIND_RENAMES", + "comments": "

Detect renames that occur between the common ancestor and the "ours"\n side or the common ancestor and the "theirs" side. This will enable\n the ability to merge between a modified and renamed file.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_MERGE_FAIL_ON_CONFLICT", + "comments": "

If a conflict occurs, exit immediately instead of attempting to\n continue resolving conflicts. The merge operation will fail with\n GIT_EMERGECONFLICT and no index will be returned.

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_MERGE_SKIP_REUC", + "comments": "

Do not write the REUC extension on the generated index

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_MERGE_NO_RECURSIVE", + "comments": "

If the commits being merged have multiple merge bases, do not build\n a recursive merge base (by merging the multiple merge bases),\n instead simply use the first base. This flag provides a similar\n merge base to git-merge-resolve.

\n", + "value": 8 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_merge_options", + { + "decl": [ + "unsigned int version", + "git_merge_flag_t flags", + "unsigned int rename_threshold", + "unsigned int target_limit", + "git_diff_similarity_metric * metric", + "unsigned int recursion_limit", + "const char * default_driver", + "git_merge_file_favor_t file_favor", + "git_merge_file_flag_t file_flags" + ], + "type": "struct", + "value": "git_merge_options", + "file": "merge.h", + "line": 241, + "lineto": 290, + "block": "unsigned int version\ngit_merge_flag_t flags\nunsigned int rename_threshold\nunsigned int target_limit\ngit_diff_similarity_metric * metric\nunsigned int recursion_limit\nconst char * default_driver\ngit_merge_file_favor_t file_favor\ngit_merge_file_flag_t file_flags", + "tdef": "typedef", + "description": " Merging options", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "git_merge_flag_t", + "name": "flags", + "comments": " See `git_merge_flag_t` above " + }, + { + "type": "unsigned int", + "name": "rename_threshold", + "comments": " Similarity to consider a file renamed (default 50). If\n `GIT_MERGE_FIND_RENAMES` is enabled, added files will be compared\n with deleted files to determine their similarity. Files that are\n more similar than the rename threshold (percentage-wise) will be\n treated as a rename." + }, + { + "type": "unsigned int", + "name": "target_limit", + "comments": " Maximum similarity sources to examine for renames (default 200).\n If the number of rename candidates (add / delete pairs) is greater\n than this value, inexact rename detection is aborted.\n\n This setting overrides the `merge.renameLimit` configuration value." 
+ }, + { + "type": "git_diff_similarity_metric *", + "name": "metric", + "comments": " Pluggable similarity metric; pass NULL to use internal metric " + }, + { + "type": "unsigned int", + "name": "recursion_limit", + "comments": " Maximum number of times to merge common ancestors to build a\n virtual merge base when faced with criss-cross merges. When this\n limit is reached, the next ancestor will simply be used instead of\n attempting to merge it. The default is unlimited." + }, + { + "type": "const char *", + "name": "default_driver", + "comments": " Default merge driver to be used when both sides of a merge have\n changed. The default is the `text` driver." + }, + { + "type": "git_merge_file_favor_t", + "name": "file_favor", + "comments": " Flags for handling conflicting content, to be used with the standard\n (`text`) merge driver." + }, + { + "type": "git_merge_file_flag_t", + "name": "file_flags", + "comments": " see `git_merge_file_flag_t` above " + } + ], + "used": { + "returns": [], + "needs": [ + "git_cherrypick_commit", + "git_merge", + "git_merge_commits", + "git_merge_init_options", + "git_merge_trees", + "git_revert_commit" + ] + } + } + ], + [ + "git_merge_preference_t", + { + "decl": [ + "GIT_MERGE_PREFERENCE_NONE", + "GIT_MERGE_PREFERENCE_NO_FASTFORWARD", + "GIT_MERGE_PREFERENCE_FASTFORWARD_ONLY" + ], + "type": "enum", + "file": "merge.h", + "line": 345, + "lineto": 363, + "block": "GIT_MERGE_PREFERENCE_NONE\nGIT_MERGE_PREFERENCE_NO_FASTFORWARD\nGIT_MERGE_PREFERENCE_FASTFORWARD_ONLY", + "tdef": "typedef", + "description": " The user's stated preference for merges.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_MERGE_PREFERENCE_NONE", + "comments": "

No configuration was found that suggests a preferred behavior for\n merge.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_MERGE_PREFERENCE_NO_FASTFORWARD", + "comments": "

There is a merge.ff=false configuration setting, suggesting that\n the user does not want to allow a fast-forward merge.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_MERGE_PREFERENCE_FASTFORWARD_ONLY", + "comments": "

There is a merge.ff=only configuration setting, suggesting that\n the user only wants fast-forward merges.

\n", + "value": 2 + } + ], + "used": { + "returns": [], + "needs": [ + "git_merge_analysis" + ] + } + } + ], + [ + "git_merge_result", + { + "decl": "git_merge_result", + "type": "struct", + "value": "git_merge_result", + "file": "types.h", + "line": 181, + "lineto": 181, + "tdef": "typedef", + "description": " Merge result ", + "comments": "", + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_note", + { + "decl": "git_note", + "type": "struct", + "value": "git_note", + "file": "types.h", + "line": 150, + "lineto": 150, + "tdef": "typedef", + "description": " Representation of a git note ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_note_author", + "git_note_committer", + "git_note_foreach", + "git_note_free", + "git_note_id", + "git_note_iterator_free", + "git_note_iterator_new", + "git_note_message", + "git_note_next", + "git_note_read" + ] + } + } + ], + [ + "git_note_iterator", + { + "decl": "git_note_iterator", + "type": "struct", + "value": "git_note_iterator", + "file": "notes.h", + "line": 35, + "lineto": 35, + "tdef": "typedef", + "description": " note iterator", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_note_iterator_free", + "git_note_iterator_new", + "git_note_next" + ] + } + } + ], + [ + "git_object", + { + "decl": "git_object", + "type": "struct", + "value": "git_object", + "file": "types.h", + "line": 108, + "lineto": 108, + "tdef": "typedef", + "description": " Representation of a generic object in a repository ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_checkout_tree", + "git_describe_commit", + "git_object_dup", + "git_object_free", + "git_object_id", + "git_object_lookup", + "git_object_lookup_bypath", + "git_object_lookup_prefix", + "git_object_owner", + "git_object_peel", + "git_object_short_id", + "git_object_type", + "git_reference_peel", + "git_reset", + "git_reset_default", + "git_revparse_ext", + "git_revparse_single", + "git_tag_annotation_create", 
+ "git_tag_create", + "git_tag_create_lightweight", + "git_tag_peel", + "git_tag_target", + "git_tree_entry_to_object" + ] + } + } + ], + [ + "git_odb", + { + "decl": "git_odb", + "type": "struct", + "value": "git_odb", + "file": "types.h", + "line": 81, + "lineto": 81, + "tdef": "typedef", + "description": " An open object database handle. ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_indexer_new", + "git_mempack_new", + "git_mempack_reset", + "git_odb_add_alternate", + "git_odb_add_backend", + "git_odb_add_disk_alternate", + "git_odb_backend_loose", + "git_odb_backend_one_pack", + "git_odb_backend_pack", + "git_odb_exists", + "git_odb_exists_prefix", + "git_odb_expand_ids", + "git_odb_foreach", + "git_odb_free", + "git_odb_get_backend", + "git_odb_init_backend", + "git_odb_new", + "git_odb_num_backends", + "git_odb_object_data", + "git_odb_object_dup", + "git_odb_object_free", + "git_odb_object_id", + "git_odb_object_size", + "git_odb_object_type", + "git_odb_open", + "git_odb_open_rstream", + "git_odb_open_wstream", + "git_odb_read", + "git_odb_read_header", + "git_odb_read_prefix", + "git_odb_refresh", + "git_odb_stream_finalize_write", + "git_odb_stream_free", + "git_odb_stream_read", + "git_odb_stream_write", + "git_odb_write", + "git_odb_write_pack", + "git_repository_odb", + "git_repository_set_odb", + "git_repository_wrap_odb" + ] + } + } + ], + [ + "git_odb_backend", + { + "decl": "git_odb_backend", + "type": "struct", + "value": "git_odb_backend", + "file": "types.h", + "line": 84, + "lineto": 84, + "block": "unsigned int version\ngit_odb * odb\nint (*)(void **, int *, git_otype *, git_odb_backend *, const git_oid *) read\nint (*)(git_oid *, void **, int *, git_otype *, git_odb_backend *, const git_oid *, int) read_prefix\nint (*)(int *, git_otype *, git_odb_backend *, const git_oid *) read_header\nint (*)(git_odb_backend *, const git_oid *, const void *, int, git_otype) write\nint (*)(git_odb_stream **, git_odb_backend *, 
git_off_t, git_otype) writestream\nint (*)(git_odb_stream **, git_odb_backend *, const git_oid *) readstream\nint (*)(git_odb_backend *, const git_oid *) exists\nint (*)(git_oid *, git_odb_backend *, const git_oid *, int) exists_prefix\nint (*)(git_odb_backend *) refresh\nint (*)(git_odb_backend *, git_odb_foreach_cb, void *) foreach\nint (*)(git_odb_writepack **, git_odb_backend *, git_odb *, git_transfer_progress_cb, void *) writepack\nvoid (*)(git_odb_backend *) free", + "tdef": "typedef", + "description": " A custom backend in an ODB ", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "git_odb *", + "name": "odb", + "comments": "" + }, + { + "type": "int (*)(void **, int *, git_otype *, git_odb_backend *, const git_oid *)", + "name": "read", + "comments": "" + }, + { + "type": "int (*)(git_oid *, void **, int *, git_otype *, git_odb_backend *, const git_oid *, int)", + "name": "read_prefix", + "comments": "" + }, + { + "type": "int (*)(int *, git_otype *, git_odb_backend *, const git_oid *)", + "name": "read_header", + "comments": "" + }, + { + "type": "int (*)(git_odb_backend *, const git_oid *, const void *, int, git_otype)", + "name": "write", + "comments": " Write an object into the backend. The id of the object has\n already been calculated and is passed in." 
+ }, + { + "type": "int (*)(git_odb_stream **, git_odb_backend *, git_off_t, git_otype)", + "name": "writestream", + "comments": "" + }, + { + "type": "int (*)(git_odb_stream **, git_odb_backend *, const git_oid *)", + "name": "readstream", + "comments": "" + }, + { + "type": "int (*)(git_odb_backend *, const git_oid *)", + "name": "exists", + "comments": "" + }, + { + "type": "int (*)(git_oid *, git_odb_backend *, const git_oid *, int)", + "name": "exists_prefix", + "comments": "" + }, + { + "type": "int (*)(git_odb_backend *)", + "name": "refresh", + "comments": " If the backend implements a refreshing mechanism, it should be exposed\n through this endpoint. Each call to `git_odb_refresh()` will invoke it.\n\n However, the backend implementation should try to stay up-to-date as much\n as possible by itself as libgit2 will not automatically invoke\n `git_odb_refresh()`. For instance, a potential strategy for the backend\n implementation to achieve this could be to internally invoke this\n endpoint on failed lookups (ie. `exists()`, `read()`, `read_header()`)." + }, + { + "type": "int (*)(git_odb_backend *, git_odb_foreach_cb, void *)", + "name": "foreach", + "comments": "" + }, + { + "type": "int (*)(git_odb_writepack **, git_odb_backend *, git_odb *, git_transfer_progress_cb, void *)", + "name": "writepack", + "comments": "" + }, + { + "type": "void (*)(git_odb_backend *)", + "name": "free", + "comments": " Frees any resources held by the odb (including the `git_odb_backend`\n itself). An odb backend implementation must provide this function." 
+ } + ], + "used": { + "returns": [], + "needs": [ + "git_mempack_new", + "git_mempack_reset", + "git_odb_add_alternate", + "git_odb_add_backend", + "git_odb_backend_loose", + "git_odb_backend_one_pack", + "git_odb_backend_pack", + "git_odb_get_backend", + "git_odb_init_backend" + ] + } + } + ], + [ + "git_odb_expand_id", + { + "decl": [ + "git_oid id", + "unsigned short length", + "git_otype type" + ], + "type": "struct", + "value": "git_odb_expand_id", + "file": "odb.h", + "line": 180, + "lineto": 195, + "block": "git_oid id\nunsigned short length\ngit_otype type", + "tdef": "typedef", + "description": " The information about object IDs to query in `git_odb_expand_ids`,\n which will be populated upon return.", + "comments": "", + "fields": [ + { + "type": "git_oid", + "name": "id", + "comments": " The object ID to expand " + }, + { + "type": "unsigned short", + "name": "length", + "comments": " The length of the object ID (in nibbles, or packets of 4 bits; the\n number of hex characters)" + }, + { + "type": "git_otype", + "name": "type", + "comments": " The (optional) type of the object to search for; leave as `0` or set\n to `GIT_OBJ_ANY` to query for any object matching the ID." 
+ } + ], + "used": { + "returns": [], + "needs": [ + "git_odb_expand_ids" + ] + } + } + ], + [ + "git_odb_object", + { + "decl": "git_odb_object", + "type": "struct", + "value": "git_odb_object", + "file": "types.h", + "line": 87, + "lineto": 87, + "tdef": "typedef", + "description": " An object read from the ODB ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_odb_object_data", + "git_odb_object_dup", + "git_odb_object_free", + "git_odb_object_id", + "git_odb_object_size", + "git_odb_object_type", + "git_odb_read", + "git_odb_read_prefix" + ] + } + } + ], + [ + "git_odb_stream", + { + "decl": "git_odb_stream", + "type": "struct", + "value": "git_odb_stream", + "file": "types.h", + "line": 90, + "lineto": 90, + "block": "git_odb_backend * backend\nunsigned int mode\nvoid * hash_ctx\ngit_off_t declared_size\ngit_off_t received_bytes\nint (*)(git_odb_stream *, char *, size_t) read\nint (*)(git_odb_stream *, const char *, size_t) write\nint (*)(git_odb_stream *, const int *) finalize_write\nvoid (*)(git_odb_stream *) free", + "tdef": "typedef", + "description": " A stream to read/write from the ODB ", + "comments": "", + "fields": [ + { + "type": "git_odb_backend *", + "name": "backend", + "comments": "" + }, + { + "type": "unsigned int", + "name": "mode", + "comments": "" + }, + { + "type": "void *", + "name": "hash_ctx", + "comments": "" + }, + { + "type": "git_off_t", + "name": "declared_size", + "comments": "" + }, + { + "type": "git_off_t", + "name": "received_bytes", + "comments": "" + }, + { + "type": "int (*)(git_odb_stream *, char *, size_t)", + "name": "read", + "comments": " Write at most `len` bytes into `buffer` and advance the stream." + }, + { + "type": "int (*)(git_odb_stream *, const char *, size_t)", + "name": "write", + "comments": " Write `len` bytes from `buffer` into the stream." 
+ }, + { + "type": "int (*)(git_odb_stream *, const int *)", + "name": "finalize_write", + "comments": " Store the contents of the stream as an object with the id\n specified in `oid`.\n\n This method might not be invoked if:\n - an error occurs earlier with the `write` callback,\n - the object referred to by `oid` already exists in any backend, or\n - the final number of received bytes differs from the size declared\n with `git_odb_open_wstream()`" + }, + { + "type": "void (*)(git_odb_stream *)", + "name": "free", + "comments": " Free the stream's memory.\n\n This method might be called without a call to `finalize_write` if\n an error occurs or if the object is already present in the ODB." + } + ], + "used": { + "returns": [], + "needs": [ + "git_odb_open_rstream", + "git_odb_open_wstream", + "git_odb_stream_finalize_write", + "git_odb_stream_free", + "git_odb_stream_read", + "git_odb_stream_write" + ] + } + } + ], + [ + "git_odb_stream_t", + { + "decl": [ + "GIT_STREAM_RDONLY", + "GIT_STREAM_WRONLY", + "GIT_STREAM_RW" + ], + "type": "enum", + "file": "odb_backend.h", + "line": 70, + "lineto": 74, + "block": "GIT_STREAM_RDONLY\nGIT_STREAM_WRONLY\nGIT_STREAM_RW", + "tdef": "typedef", + "description": " Streaming mode ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_STREAM_RDONLY", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_STREAM_WRONLY", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_STREAM_RW", + "comments": "", + "value": 6 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_odb_writepack", + { + "decl": "git_odb_writepack", + "type": "struct", + "value": "git_odb_writepack", + "file": "types.h", + "line": 93, + "lineto": 93, + "block": "git_odb_backend * backend\nint (*)(git_odb_writepack *, const void *, size_t, git_transfer_progress *) append\nint (*)(git_odb_writepack *, git_transfer_progress *) commit\nvoid (*)(git_odb_writepack *) free", + "tdef": "typedef", 
+ "description": " A stream to write a packfile to the ODB ", + "comments": "", + "fields": [ + { + "type": "git_odb_backend *", + "name": "backend", + "comments": "" + }, + { + "type": "int (*)(git_odb_writepack *, const void *, size_t, git_transfer_progress *)", + "name": "append", + "comments": "" + }, + { + "type": "int (*)(git_odb_writepack *, git_transfer_progress *)", + "name": "commit", + "comments": "" + }, + { + "type": "void (*)(git_odb_writepack *)", + "name": "free", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_odb_write_pack" + ] + } + } + ], + [ + "git_oid", + { + "decl": [ + "unsigned char [20] id" + ], + "type": "struct", + "value": "git_oid", + "file": "oid.h", + "line": 33, + "lineto": 36, + "block": "unsigned char [20] id", + "tdef": "typedef", + "description": " Unique identity of any object (commit, tree, blob, tag). ", + "comments": "", + "fields": [ + { + "type": "unsigned char [20]", + "name": "id", + "comments": " raw binary formatted id " + } + ], + "used": { + "returns": [ + "git_annotated_commit_id", + "git_blob_id", + "git_commit_id", + "git_commit_parent_id", + "git_commit_tree_id", + "git_filter_source_id", + "git_index_checksum", + "git_indexer_hash", + "git_note_id", + "git_object_id", + "git_odb_object_id", + "git_oid_shorten_new", + "git_packbuilder_hash", + "git_reference_target", + "git_reference_target_peel", + "git_reflog_entry_id_new", + "git_reflog_entry_id_old", + "git_submodule_head_id", + "git_submodule_index_id", + "git_submodule_wd_id", + "git_tag_id", + "git_tag_target_id", + "git_tree_entry_id", + "git_tree_id" + ], + "needs": [ + "git_annotated_commit_from_fetchhead", + "git_annotated_commit_lookup", + "git_blob_create_frombuffer", + "git_blob_create_fromchunks", + "git_blob_create_fromdisk", + "git_blob_create_fromstream_commit", + "git_blob_create_fromworkdir", + "git_blob_lookup", + "git_blob_lookup_prefix", + "git_commit_amend", + "git_commit_create", + 
"git_commit_create_from_callback", + "git_commit_create_v", + "git_commit_create_with_signature", + "git_commit_extract_signature", + "git_commit_lookup", + "git_commit_lookup_prefix", + "git_graph_ahead_behind", + "git_graph_descendant_of", + "git_index_write_tree", + "git_index_write_tree_to", + "git_merge_base", + "git_merge_base_many", + "git_merge_base_octopus", + "git_merge_bases", + "git_merge_bases_many", + "git_note_create", + "git_note_foreach_cb", + "git_note_next", + "git_note_read", + "git_note_remove", + "git_object_lookup", + "git_object_lookup_prefix", + "git_odb_exists", + "git_odb_exists_prefix", + "git_odb_foreach_cb", + "git_odb_hash", + "git_odb_hashfile", + "git_odb_open_rstream", + "git_odb_read", + "git_odb_read_header", + "git_odb_read_prefix", + "git_odb_stream_finalize_write", + "git_odb_write", + "git_oid_cmp", + "git_oid_cpy", + "git_oid_equal", + "git_oid_fmt", + "git_oid_fromraw", + "git_oid_fromstr", + "git_oid_fromstrn", + "git_oid_fromstrp", + "git_oid_iszero", + "git_oid_ncmp", + "git_oid_nfmt", + "git_oid_pathfmt", + "git_oid_shorten_add", + "git_oid_shorten_free", + "git_oid_strcmp", + "git_oid_streq", + "git_oid_tostr", + "git_oid_tostr_s", + "git_oidarray_free", + "git_packbuilder_insert", + "git_packbuilder_insert_commit", + "git_packbuilder_insert_recur", + "git_packbuilder_insert_tree", + "git_rebase_commit", + "git_reference__alloc", + "git_reference_create", + "git_reference_create_matching", + "git_reference_name_to_id", + "git_reference_set_target", + "git_reflog_append", + "git_repository_hashfile", + "git_repository_set_head_detached", + "git_revwalk_hide", + "git_revwalk_hide_cb", + "git_revwalk_next", + "git_revwalk_push", + "git_tag_annotation_create", + "git_tag_create", + "git_tag_create_frombuffer", + "git_tag_create_lightweight", + "git_tag_lookup", + "git_tag_lookup_prefix", + "git_tree_entry_byid", + "git_tree_lookup", + "git_tree_lookup_prefix", + "git_treebuilder_insert", + "git_treebuilder_write" + ] + } + 
} + ], + [ + "git_oid_shorten", + { + "decl": "git_oid_shorten", + "type": "struct", + "value": "git_oid_shorten", + "file": "oid.h", + "line": 216, + "lineto": 216, + "tdef": "typedef", + "description": " OID Shortener object", + "comments": "", + "used": { + "returns": [ + "git_oid_shorten_new" + ], + "needs": [ + "git_oid_shorten_add", + "git_oid_shorten_free" + ] + } + } + ], + [ + "git_oidarray", + { + "decl": [ + "git_oid * ids", + "size_t count" + ], + "type": "struct", + "value": "git_oidarray", + "file": "oidarray.h", + "line": 16, + "lineto": 19, + "block": "git_oid * ids\nsize_t count", + "tdef": "typedef", + "description": " Array of object ids ", + "comments": "", + "fields": [ + { + "type": "git_oid *", + "name": "ids", + "comments": "" + }, + { + "type": "size_t", + "name": "count", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_merge_bases", + "git_merge_bases_many", + "git_oidarray_free" + ] + } + } + ], + [ + "git_otype", + { + "decl": [ + "GIT_OBJ_ANY", + "GIT_OBJ_BAD", + "GIT_OBJ__EXT1", + "GIT_OBJ_COMMIT", + "GIT_OBJ_TREE", + "GIT_OBJ_BLOB", + "GIT_OBJ_TAG", + "GIT_OBJ__EXT2", + "GIT_OBJ_OFS_DELTA", + "GIT_OBJ_REF_DELTA" + ], + "type": "enum", + "file": "types.h", + "line": 67, + "lineto": 78, + "block": "GIT_OBJ_ANY\nGIT_OBJ_BAD\nGIT_OBJ__EXT1\nGIT_OBJ_COMMIT\nGIT_OBJ_TREE\nGIT_OBJ_BLOB\nGIT_OBJ_TAG\nGIT_OBJ__EXT2\nGIT_OBJ_OFS_DELTA\nGIT_OBJ_REF_DELTA", + "tdef": "typedef", + "description": " Basic type (loose or packed) of any Git object. ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_OBJ_ANY", + "comments": "

Object can be any of the following

\n", + "value": -2 + }, + { + "type": "int", + "name": "GIT_OBJ_BAD", + "comments": "

Object is invalid.

\n", + "value": -1 + }, + { + "type": "int", + "name": "GIT_OBJ__EXT1", + "comments": "

Reserved for future use.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_OBJ_COMMIT", + "comments": "

A commit object.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_OBJ_TREE", + "comments": "

A tree (directory listing) object.

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_OBJ_BLOB", + "comments": "

A file revision object.

\n", + "value": 3 + }, + { + "type": "int", + "name": "GIT_OBJ_TAG", + "comments": "

An annotated tag object.

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_OBJ__EXT2", + "comments": "

Reserved for future use.

\n", + "value": 5 + }, + { + "type": "int", + "name": "GIT_OBJ_OFS_DELTA", + "comments": "

A delta, base is given by an offset.

\n", + "value": 6 + }, + { + "type": "int", + "name": "GIT_OBJ_REF_DELTA", + "comments": "

A delta, base is given by object id.

\n", + "value": 7 + } + ], + "used": { + "returns": [ + "git_object_string2type", + "git_object_type", + "git_odb_object_type", + "git_tag_target_type", + "git_tree_entry_type" + ], + "needs": [ + "git_object__size", + "git_object_lookup", + "git_object_lookup_bypath", + "git_object_lookup_prefix", + "git_object_peel", + "git_object_type2string", + "git_object_typeisloose", + "git_odb_hash", + "git_odb_hashfile", + "git_odb_open_wstream", + "git_odb_read_header", + "git_odb_write", + "git_reference_peel", + "git_repository_hashfile" + ] + } + } + ], + [ + "git_packbuilder", + { + "decl": "git_packbuilder", + "type": "struct", + "value": "git_packbuilder", + "file": "types.h", + "line": 153, + "lineto": 153, + "tdef": "typedef", + "description": " Representation of a git packbuilder ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_packbuilder_foreach", + "git_packbuilder_free", + "git_packbuilder_hash", + "git_packbuilder_insert", + "git_packbuilder_insert_commit", + "git_packbuilder_insert_recur", + "git_packbuilder_insert_tree", + "git_packbuilder_insert_walk", + "git_packbuilder_new", + "git_packbuilder_object_count", + "git_packbuilder_set_callbacks", + "git_packbuilder_set_threads", + "git_packbuilder_write", + "git_packbuilder_written" + ] + } + } + ], + [ + "git_packbuilder_stage_t", + { + "decl": [ + "GIT_PACKBUILDER_ADDING_OBJECTS", + "GIT_PACKBUILDER_DELTAFICATION" + ], + "type": "enum", + "file": "pack.h", + "line": 51, + "lineto": 54, + "block": "GIT_PACKBUILDER_ADDING_OBJECTS\nGIT_PACKBUILDER_DELTAFICATION", + "tdef": "typedef", + "description": " Stages that are reported by the packbuilder progress callback.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_PACKBUILDER_ADDING_OBJECTS", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_PACKBUILDER_DELTAFICATION", + "comments": "", + "value": 1 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_patch", + { + "decl": 
"git_patch", + "type": "struct", + "value": "git_patch", + "file": "patch.h", + "line": 29, + "lineto": 29, + "tdef": "typedef", + "description": " The diff patch is used to store all the text diffs for a delta.", + "comments": "

You can easily loop over the content of patches and get information about them.

\n", + "used": { + "returns": [], + "needs": [ + "git_patch_free", + "git_patch_from_blob_and_buffer", + "git_patch_from_blobs", + "git_patch_from_buffers", + "git_patch_from_diff", + "git_patch_get_delta", + "git_patch_get_hunk", + "git_patch_get_line_in_hunk", + "git_patch_line_stats", + "git_patch_num_hunks", + "git_patch_num_lines_in_hunk", + "git_patch_print", + "git_patch_size", + "git_patch_to_buf" + ] + } + } + ], + [ + "git_pathspec", + { + "decl": "git_pathspec", + "type": "struct", + "value": "git_pathspec", + "file": "pathspec.h", + "line": 20, + "lineto": 20, + "tdef": "typedef", + "description": " Compiled pathspec", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_pathspec_free", + "git_pathspec_match_diff", + "git_pathspec_match_index", + "git_pathspec_match_list_diff_entry", + "git_pathspec_match_list_entry", + "git_pathspec_match_list_entrycount", + "git_pathspec_match_list_failed_entry", + "git_pathspec_match_list_failed_entrycount", + "git_pathspec_match_list_free", + "git_pathspec_match_tree", + "git_pathspec_match_workdir", + "git_pathspec_matches_path", + "git_pathspec_new" + ] + } + } + ], + [ + "git_pathspec_flag_t", + { + "decl": [ + "GIT_PATHSPEC_DEFAULT", + "GIT_PATHSPEC_IGNORE_CASE", + "GIT_PATHSPEC_USE_CASE", + "GIT_PATHSPEC_NO_GLOB", + "GIT_PATHSPEC_NO_MATCH_ERROR", + "GIT_PATHSPEC_FIND_FAILURES", + "GIT_PATHSPEC_FAILURES_ONLY" + ], + "type": "enum", + "file": "pathspec.h", + "line": 48, + "lineto": 56, + "block": "GIT_PATHSPEC_DEFAULT\nGIT_PATHSPEC_IGNORE_CASE\nGIT_PATHSPEC_USE_CASE\nGIT_PATHSPEC_NO_GLOB\nGIT_PATHSPEC_NO_MATCH_ERROR\nGIT_PATHSPEC_FIND_FAILURES\nGIT_PATHSPEC_FAILURES_ONLY", + "tdef": "typedef", + "description": " Options controlling how pathspec match should be executed", + "comments": "
    \n
  • GIT_PATHSPEC_IGNORE_CASE forces match to ignore case; otherwise match will use native case sensitivity of platform filesystem - GIT_PATHSPEC_USE_CASE forces case sensitive match; otherwise match will use native case sensitivity of platform filesystem - GIT_PATHSPEC_NO_GLOB disables glob patterns and just uses simple string comparison for matching - GIT_PATHSPEC_NO_MATCH_ERROR means the match functions return error code GIT_ENOTFOUND if no matches are found; otherwise no matches is still success (return 0) but git_pathspec_match_list_entrycount will indicate 0 matches. - GIT_PATHSPEC_FIND_FAILURES means that the git_pathspec_match_list should track which patterns matched which files so that at the end of the match we can identify patterns that did not match any files. - GIT_PATHSPEC_FAILURES_ONLY means that the git_pathspec_match_list does not need to keep the actual matching filenames. Use this to just test if there were any matches at all or in combination with GIT_PATHSPEC_FIND_FAILURES to validate a pathspec.
  • \n
\n", + "fields": [ + { + "type": "int", + "name": "GIT_PATHSPEC_DEFAULT", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_PATHSPEC_IGNORE_CASE", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_PATHSPEC_USE_CASE", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_PATHSPEC_NO_GLOB", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_PATHSPEC_NO_MATCH_ERROR", + "comments": "", + "value": 8 + }, + { + "type": "int", + "name": "GIT_PATHSPEC_FIND_FAILURES", + "comments": "", + "value": 16 + }, + { + "type": "int", + "name": "GIT_PATHSPEC_FAILURES_ONLY", + "comments": "", + "value": 32 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_pathspec_match_list", + { + "decl": "git_pathspec_match_list", + "type": "struct", + "value": "git_pathspec_match_list", + "file": "pathspec.h", + "line": 25, + "lineto": 25, + "tdef": "typedef", + "description": " List of filenames matching a pathspec", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_pathspec_match_diff", + "git_pathspec_match_index", + "git_pathspec_match_list_diff_entry", + "git_pathspec_match_list_entry", + "git_pathspec_match_list_entrycount", + "git_pathspec_match_list_failed_entry", + "git_pathspec_match_list_failed_entrycount", + "git_pathspec_match_list_free", + "git_pathspec_match_tree", + "git_pathspec_match_workdir" + ] + } + } + ], + [ + "git_proxy_options", + { + "decl": [ + "unsigned int version", + "git_proxy_t type", + "const char * url", + "git_cred_acquire_cb credentials", + "git_transport_certificate_check_cb certificate_check", + "void * payload" + ], + "type": "struct", + "value": "git_proxy_options", + "file": "proxy.h", + "line": 42, + "lineto": 77, + "block": "unsigned int version\ngit_proxy_t type\nconst char * url\ngit_cred_acquire_cb credentials\ngit_transport_certificate_check_cb certificate_check\nvoid * payload", + "tdef": "typedef", + "description": " Options 
for connecting through a proxy", + "comments": "

Note that not all types may be supported, depending on the platform and compilation options.

\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "git_proxy_t", + "name": "type", + "comments": " The type of proxy to use, by URL, auto-detect." + }, + { + "type": "const char *", + "name": "url", + "comments": " The URL of the proxy." + }, + { + "type": "git_cred_acquire_cb", + "name": "credentials", + "comments": " This will be called if the remote host requires\n authentication in order to connect to it.\n\n Returning GIT_PASSTHROUGH will make libgit2 behave as\n though this field isn't set." + }, + { + "type": "git_transport_certificate_check_cb", + "name": "certificate_check", + "comments": " If cert verification fails, this will be called to let the\n user make the final decision of whether to allow the\n connection to proceed. Returns 1 to allow the connection, 0\n to disallow it or a negative value to indicate an error." + }, + { + "type": "void *", + "name": "payload", + "comments": " Payload to be provided to the credentials and certificate\n check callbacks." + } + ], + "used": { + "returns": [], + "needs": [ + "git_proxy_init_options", + "git_remote_connect" + ] + } + } + ], + [ + "git_proxy_t", + { + "decl": [ + "GIT_PROXY_NONE", + "GIT_PROXY_AUTO", + "GIT_PROXY_SPECIFIED" + ], + "type": "enum", + "file": "proxy.h", + "line": 18, + "lineto": 34, + "block": "GIT_PROXY_NONE\nGIT_PROXY_AUTO\nGIT_PROXY_SPECIFIED", + "tdef": "typedef", + "description": " The type of proxy to use.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_PROXY_NONE", + "comments": "

Do not attempt to connect through a proxy

\n\n

If built against libcurl, it itself may attempt to connect\n to a proxy if the environment variables specify it.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_PROXY_AUTO", + "comments": "

Try to auto-detect the proxy from the git configuration.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_PROXY_SPECIFIED", + "comments": "

Connect via the URL given in the options

\n", + "value": 2 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_push", + { + "decl": "git_push", + "type": "struct", + "value": "git_push", + "file": "types.h", + "line": 236, + "lineto": 236, + "tdef": "typedef", + "description": " Preparation for a push operation. Can be used to configure what to\n push and the level of parallelism of the packfile builder.", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_push_init_options", + "git_push_negotiation", + "git_remote_push", + "git_remote_upload" + ] + } + } + ], + [ + "git_push_options", + { + "decl": [ + "unsigned int version", + "unsigned int pb_parallelism", + "git_remote_callbacks callbacks", + "git_proxy_options proxy_opts", + "git_strarray custom_headers" + ], + "type": "struct", + "value": "git_push_options", + "file": "remote.h", + "line": 585, + "lineto": 612, + "block": "unsigned int version\nunsigned int pb_parallelism\ngit_remote_callbacks callbacks\ngit_proxy_options proxy_opts\ngit_strarray custom_headers", + "tdef": "typedef", + "description": " Controls the behavior of a git_push object.", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "unsigned int", + "name": "pb_parallelism", + "comments": " If the transport being used to push to the remote requires the creation\n of a pack file, this controls the number of worker threads used by\n the packbuilder when creating that pack file to be sent to the remote.\n\n If set to 0, the packbuilder will auto-detect the number of threads\n to create. The default value is 1." + }, + { + "type": "git_remote_callbacks", + "name": "callbacks", + "comments": " Callbacks to use for this push operation" + }, + { + "type": "git_proxy_options", + "name": "proxy_opts", + "comments": " Proxy options to use, by default no proxy is used." 
+ }, + { + "type": "git_strarray", + "name": "custom_headers", + "comments": " Extra headers for this push operation" + } + ], + "used": { + "returns": [], + "needs": [ + "git_push_init_options", + "git_remote_push", + "git_remote_upload" + ] + } + } + ], + [ + "git_push_update", + { + "decl": [ + "char * src_refname", + "char * dst_refname", + "git_oid src", + "git_oid dst" + ], + "type": "struct", + "value": "git_push_update", + "file": "remote.h", + "line": 343, + "lineto": 360, + "block": "char * src_refname\nchar * dst_refname\ngit_oid src\ngit_oid dst", + "tdef": "typedef", + "description": " Represents an update which will be performed on the remote during push", + "comments": "", + "fields": [ + { + "type": "char *", + "name": "src_refname", + "comments": " The source name of the reference" + }, + { + "type": "char *", + "name": "dst_refname", + "comments": " The name of the reference to update on the server" + }, + { + "type": "git_oid", + "name": "src", + "comments": " The current target of the reference" + }, + { + "type": "git_oid", + "name": "dst", + "comments": " The new target for the reference" + } + ], + "used": { + "returns": [], + "needs": [ + "git_push_negotiation" + ] + } + } + ], + [ + "git_rebase", + { + "decl": "git_rebase", + "type": "struct", + "value": "git_rebase", + "file": "types.h", + "line": 187, + "lineto": 187, + "tdef": "typedef", + "description": " Representation of a rebase ", + "comments": "", + "used": { + "returns": [ + "git_rebase_operation_byindex" + ], + "needs": [ + "git_rebase_abort", + "git_rebase_commit", + "git_rebase_finish", + "git_rebase_free", + "git_rebase_init", + "git_rebase_init_options", + "git_rebase_inmemory_index", + "git_rebase_next", + "git_rebase_open", + "git_rebase_operation_byindex", + "git_rebase_operation_current", + "git_rebase_operation_entrycount" + ] + } + } + ], + [ + "git_rebase_operation", + { + "decl": [ + "git_rebase_operation_t type", + "const git_oid id", + "const char * exec" + ], + 
"type": "struct", + "value": "git_rebase_operation", + "file": "rebase.h", + "line": 130, + "lineto": 145, + "block": "git_rebase_operation_t type\nconst git_oid id\nconst char * exec", + "tdef": "typedef", + "description": " A rebase operation", + "comments": "

Describes a single instruction/operation to be performed during the rebase.

\n", + "fields": [ + { + "type": "git_rebase_operation_t", + "name": "type", + "comments": " The type of rebase operation. " + }, + { + "type": "const git_oid", + "name": "id", + "comments": " The commit ID being cherry-picked. This will be populated for\n all operations except those of type `GIT_REBASE_OPERATION_EXEC`." + }, + { + "type": "const char *", + "name": "exec", + "comments": " The executable the user has requested be run. This will only\n be populated for operations of type `GIT_REBASE_OPERATION_EXEC`." + } + ], + "used": { + "returns": [ + "git_rebase_operation_byindex" + ], + "needs": [ + "git_rebase_next" + ] + } + } + ], + [ + "git_rebase_operation_t", + { + "decl": [ + "GIT_REBASE_OPERATION_PICK", + "GIT_REBASE_OPERATION_REWORD", + "GIT_REBASE_OPERATION_EDIT", + "GIT_REBASE_OPERATION_SQUASH", + "GIT_REBASE_OPERATION_FIXUP", + "GIT_REBASE_OPERATION_EXEC" + ], + "type": "enum", + "file": "rebase.h", + "line": 78, + "lineto": 114, + "block": "GIT_REBASE_OPERATION_PICK\nGIT_REBASE_OPERATION_REWORD\nGIT_REBASE_OPERATION_EDIT\nGIT_REBASE_OPERATION_SQUASH\nGIT_REBASE_OPERATION_FIXUP\nGIT_REBASE_OPERATION_EXEC", + "tdef": "typedef", + "description": " Type of rebase operation in-progress after calling `git_rebase_next`.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_REBASE_OPERATION_PICK", + "comments": "

The given commit is to be cherry-picked. The client should commit\n the changes and continue if there are no conflicts.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_REBASE_OPERATION_REWORD", + "comments": "

The given commit is to be cherry-picked, but the client should prompt\n the user to provide an updated commit message.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_REBASE_OPERATION_EDIT", + "comments": "

The given commit is to be cherry-picked, but the client should stop\n to allow the user to edit the changes before committing them.

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_REBASE_OPERATION_SQUASH", + "comments": "

The given commit is to be squashed into the previous commit. The\n commit message will be merged with the previous message.

\n", + "value": 3 + }, + { + "type": "int", + "name": "GIT_REBASE_OPERATION_FIXUP", + "comments": "

The given commit is to be squashed into the previous commit. The\n commit message from this commit will be discarded.

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_REBASE_OPERATION_EXEC", + "comments": "

No commit will be cherry-picked. The client should run the given\n command and (if successful) continue.

\n", + "value": 5 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_ref_t", + { + "decl": [ + "GIT_REF_INVALID", + "GIT_REF_OID", + "GIT_REF_SYMBOLIC", + "GIT_REF_LISTALL" + ], + "type": "enum", + "file": "types.h", + "line": 190, + "lineto": 195, + "block": "GIT_REF_INVALID\nGIT_REF_OID\nGIT_REF_SYMBOLIC\nGIT_REF_LISTALL", + "tdef": "typedef", + "description": " Basic type of any Git reference. ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_REF_INVALID", + "comments": "

Invalid reference

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_REF_OID", + "comments": "

A reference which points at an object id

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_REF_SYMBOLIC", + "comments": "

A reference which points at another reference

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_REF_LISTALL", + "comments": "", + "value": 3 + } + ], + "used": { + "returns": [ + "git_reference_type" + ], + "needs": [] + } + } + ], + [ + "git_refdb", + { + "decl": "git_refdb", + "type": "struct", + "value": "git_refdb", + "file": "types.h", + "line": 96, + "lineto": 96, + "tdef": "typedef", + "description": " An open refs database handle. ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_refdb_backend_fs", + "git_refdb_compress", + "git_refdb_free", + "git_refdb_init_backend", + "git_refdb_new", + "git_refdb_open", + "git_refdb_set_backend", + "git_repository_refdb", + "git_repository_set_refdb" + ] + } + } + ], + [ + "git_refdb_backend", + { + "decl": "git_refdb_backend", + "type": "struct", + "value": "git_refdb_backend", + "file": "types.h", + "line": 99, + "lineto": 99, + "block": "unsigned int version\nint (*)(int *, git_refdb_backend *, const char *) exists\nint (*)(git_reference **, git_refdb_backend *, const char *) lookup\nint (*)(git_reference_iterator **, struct git_refdb_backend *, const char *) iterator\nint (*)(git_refdb_backend *, const git_reference *, int, const git_signature *, const char *, const git_oid *, const char *) write\nint (*)(git_reference **, git_refdb_backend *, const char *, const char *, int, const git_signature *, const char *) rename\nint (*)(git_refdb_backend *, const char *, const git_oid *, const char *) del\nint (*)(git_refdb_backend *) compress\nint (*)(git_refdb_backend *, const char *) has_log\nint (*)(git_refdb_backend *, const char *) ensure_log\nvoid (*)(git_refdb_backend *) free\nint (*)(git_reflog **, git_refdb_backend *, const char *) reflog_read\nint (*)(git_refdb_backend *, git_reflog *) reflog_write\nint (*)(git_refdb_backend *, const char *, const char *) reflog_rename\nint (*)(git_refdb_backend *, const char *) reflog_delete\nint (*)(void **, git_refdb_backend *, const char *) lock\nint (*)(git_refdb_backend *, void *, int, int, 
const git_reference *, const git_signature *, const char *) unlock", + "tdef": "typedef", + "description": " A custom backend for refs ", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "int (*)(int *, git_refdb_backend *, const char *)", + "name": "exists", + "comments": " Queries the refdb backend to determine if the given ref_name\n exists. A refdb implementation must provide this function." + }, + { + "type": "int (*)(git_reference **, git_refdb_backend *, const char *)", + "name": "lookup", + "comments": " Queries the refdb backend for a given reference. A refdb\n implementation must provide this function." + }, + { + "type": "int (*)(git_reference_iterator **, struct git_refdb_backend *, const char *)", + "name": "iterator", + "comments": " Allocate an iterator object for the backend.\n\n A refdb implementation must provide this function." + }, + { + "type": "int (*)(git_refdb_backend *, const git_reference *, int, const git_signature *, const char *, const git_oid *, const char *)", + "name": "write", + "comments": "" + }, + { + "type": "int (*)(git_reference **, git_refdb_backend *, const char *, const char *, int, const git_signature *, const char *)", + "name": "rename", + "comments": "" + }, + { + "type": "int (*)(git_refdb_backend *, const char *, const git_oid *, const char *)", + "name": "del", + "comments": " Deletes the given reference (and if necessary its reflog)\n from the refdb. A refdb implementation must provide this\n function." + }, + { + "type": "int (*)(git_refdb_backend *)", + "name": "compress", + "comments": " Suggests that the given refdb compress or optimize its references.\n This mechanism is implementation specific. (For on-disk reference\n databases, this may pack all loose references.) A refdb\n implementation may provide this function; if it is not provided,\n nothing will be done." 
+ }, + { + "type": "int (*)(git_refdb_backend *, const char *)", + "name": "has_log", + "comments": " Query whether a particular reference has a log (may be empty)" + }, + { + "type": "int (*)(git_refdb_backend *, const char *)", + "name": "ensure_log", + "comments": " Make sure a particular reference will have a reflog which\n will be appended to on writes." + }, + { + "type": "void (*)(git_refdb_backend *)", + "name": "free", + "comments": " Frees any resources held by the refdb (including the `git_refdb_backend`\n itself). A refdb backend implementation must provide this function." + }, + { + "type": "int (*)(git_reflog **, git_refdb_backend *, const char *)", + "name": "reflog_read", + "comments": " Read the reflog for the given reference name." + }, + { + "type": "int (*)(git_refdb_backend *, git_reflog *)", + "name": "reflog_write", + "comments": " Write a reflog to disk." + }, + { + "type": "int (*)(git_refdb_backend *, const char *, const char *)", + "name": "reflog_rename", + "comments": " Rename a reflog" + }, + { + "type": "int (*)(git_refdb_backend *, const char *)", + "name": "reflog_delete", + "comments": " Remove a reflog." + }, + { + "type": "int (*)(void **, git_refdb_backend *, const char *)", + "name": "lock", + "comments": " Lock a reference. The opaque parameter will be passed to the unlock function" + }, + { + "type": "int (*)(git_refdb_backend *, void *, int, int, const git_reference *, const git_signature *, const char *)", + "name": "unlock", + "comments": " Unlock a reference. Only one of target or symbolic_target\n will be set. 
success indicates whether to update the\n reference or discard the lock (if it's false)" + } + ], + "used": { + "returns": [], + "needs": [ + "git_refdb_backend_fs", + "git_refdb_init_backend", + "git_refdb_set_backend" + ] + } + } + ], + [ + "git_reference", + { + "decl": "git_reference", + "type": "struct", + "value": "git_reference", + "file": "types.h", + "line": 169, + "lineto": 169, + "tdef": "typedef", + "description": " In-memory representation of a reference. ", + "comments": "", + "used": { + "returns": [ + "git_reference__alloc", + "git_reference__alloc_symbolic" + ], + "needs": [ + "git_annotated_commit_from_ref", + "git_branch_create", + "git_branch_create_from_annotated", + "git_branch_delete", + "git_branch_is_head", + "git_branch_lookup", + "git_branch_move", + "git_branch_name", + "git_branch_next", + "git_branch_set_upstream", + "git_branch_upstream", + "git_reference_cmp", + "git_reference_create", + "git_reference_create_matching", + "git_reference_delete", + "git_reference_dwim", + "git_reference_foreach", + "git_reference_foreach_glob", + "git_reference_foreach_name", + "git_reference_free", + "git_reference_is_branch", + "git_reference_is_note", + "git_reference_is_remote", + "git_reference_is_tag", + "git_reference_iterator_free", + "git_reference_iterator_glob_new", + "git_reference_iterator_new", + "git_reference_lookup", + "git_reference_name", + "git_reference_next", + "git_reference_next_name", + "git_reference_owner", + "git_reference_peel", + "git_reference_rename", + "git_reference_resolve", + "git_reference_set_target", + "git_reference_shorthand", + "git_reference_symbolic_create", + "git_reference_symbolic_create_matching", + "git_reference_symbolic_set_target", + "git_reference_symbolic_target", + "git_reference_target", + "git_reference_target_peel", + "git_reference_type", + "git_repository_head", + "git_revparse_ext" + ] + } + } + ], + [ + "git_reference_iterator", + { + "decl": "git_reference_iterator", + "type": "struct", + 
"value": "git_reference_iterator", + "file": "types.h", + "line": 172, + "lineto": 172, + "block": "git_refdb * db\nint (*)(git_reference **, git_reference_iterator *) next\nint (*)(const char **, git_reference_iterator *) next_name\nvoid (*)(git_reference_iterator *) free", + "tdef": "typedef", + "description": " Iterator for references ", + "comments": "", + "fields": [ + { + "type": "git_refdb *", + "name": "db", + "comments": "" + }, + { + "type": "int (*)(git_reference **, git_reference_iterator *)", + "name": "next", + "comments": " Return the current reference and advance the iterator." + }, + { + "type": "int (*)(const char **, git_reference_iterator *)", + "name": "next_name", + "comments": " Return the name of the current reference and advance the iterator" + }, + { + "type": "void (*)(git_reference_iterator *)", + "name": "free", + "comments": " Free the iterator" + } + ], + "used": { + "returns": [], + "needs": [ + "git_reference_iterator_free", + "git_reference_iterator_glob_new", + "git_reference_iterator_new", + "git_reference_next", + "git_reference_next_name" + ] + } + } + ], + [ + "git_reference_normalize_t", + { + "decl": [ + "GIT_REF_FORMAT_NORMAL", + "GIT_REF_FORMAT_ALLOW_ONELEVEL", + "GIT_REF_FORMAT_REFSPEC_PATTERN", + "GIT_REF_FORMAT_REFSPEC_SHORTHAND" + ], + "type": "enum", + "file": "refs.h", + "line": 625, + "lineto": 654, + "block": "GIT_REF_FORMAT_NORMAL\nGIT_REF_FORMAT_ALLOW_ONELEVEL\nGIT_REF_FORMAT_REFSPEC_PATTERN\nGIT_REF_FORMAT_REFSPEC_SHORTHAND", + "tdef": "typedef", + "description": " Normalization options for reference lookup", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_REF_FORMAT_NORMAL", + "comments": "

No particular normalization.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_REF_FORMAT_ALLOW_ONELEVEL", + "comments": "

Control whether one-level refnames are accepted\n (i.e., refnames that do not contain multiple /-separated\n components). Those are expected to be written only using\n uppercase letters and underscore (FETCH_HEAD, ...)

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_REF_FORMAT_REFSPEC_PATTERN", + "comments": "

Interpret the provided name as a reference pattern for a\n refspec (as used with remote repositories). If this option\n is enabled, the name is allowed to contain a single * (\n<star

\n\n
\n

)\n in place of a one full pathname component\n (e.g., foo/\n<star\n/bar but not foo/bar\n<star\n).

\n
\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_REF_FORMAT_REFSPEC_SHORTHAND", + "comments": "

Interpret the name as part of a refspec in shorthand form\n so the ONELEVEL naming rules aren't enforced and 'master'\n becomes a valid name.

\n", + "value": 4 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_reflog", + { + "decl": "git_reflog", + "type": "struct", + "value": "git_reflog", + "file": "types.h", + "line": 147, + "lineto": 147, + "tdef": "typedef", + "description": " Representation of a reference log ", + "comments": "", + "used": { + "returns": [ + "git_reflog_entry_byindex" + ], + "needs": [ + "git_reflog_append", + "git_reflog_drop", + "git_reflog_entry_byindex", + "git_reflog_entry_committer", + "git_reflog_entry_id_new", + "git_reflog_entry_id_old", + "git_reflog_entry_message", + "git_reflog_entrycount", + "git_reflog_free", + "git_reflog_read", + "git_reflog_write" + ] + } + } + ], + [ + "git_reflog_entry", + { + "decl": "git_reflog_entry", + "type": "struct", + "value": "git_reflog_entry", + "file": "types.h", + "line": 144, + "lineto": 144, + "tdef": "typedef", + "description": " Representation of a reference log entry ", + "comments": "", + "used": { + "returns": [ + "git_reflog_entry_byindex" + ], + "needs": [ + "git_reflog_entry_committer", + "git_reflog_entry_id_new", + "git_reflog_entry_id_old", + "git_reflog_entry_message" + ] + } + } + ], + [ + "git_remote", + { + "decl": "git_remote", + "type": "struct", + "value": "git_remote", + "file": "types.h", + "line": 224, + "lineto": 224, + "tdef": "typedef", + "description": " Git's idea of a remote repository. 
A remote can be anonymous (in\n which case it does not have backing configuration entires).", + "comments": "", + "used": { + "returns": [ + "git_remote_autotag" + ], + "needs": [ + "git_headlist_cb", + "git_remote_autotag", + "git_remote_connect", + "git_remote_connected", + "git_remote_create", + "git_remote_create_anonymous", + "git_remote_create_cb", + "git_remote_create_with_fetchspec", + "git_remote_default_branch", + "git_remote_disconnect", + "git_remote_download", + "git_remote_dup", + "git_remote_fetch", + "git_remote_free", + "git_remote_get_fetch_refspecs", + "git_remote_get_push_refspecs", + "git_remote_get_refspec", + "git_remote_init_callbacks", + "git_remote_lookup", + "git_remote_ls", + "git_remote_name", + "git_remote_owner", + "git_remote_prune", + "git_remote_prune_refs", + "git_remote_push", + "git_remote_pushurl", + "git_remote_refspec_count", + "git_remote_set_autotag", + "git_remote_stats", + "git_remote_stop", + "git_remote_update_tips", + "git_remote_upload", + "git_remote_url", + "git_transport_cb", + "git_transport_dummy", + "git_transport_local", + "git_transport_new", + "git_transport_smart", + "git_transport_ssh_with_paths" + ] + } + } + ], + [ + "git_remote_autotag_option_t", + { + "decl": [ + "GIT_REMOTE_DOWNLOAD_TAGS_UNSPECIFIED", + "GIT_REMOTE_DOWNLOAD_TAGS_AUTO", + "GIT_REMOTE_DOWNLOAD_TAGS_NONE", + "GIT_REMOTE_DOWNLOAD_TAGS_ALL" + ], + "type": "enum", + "file": "remote.h", + "line": 497, + "lineto": 515, + "block": "GIT_REMOTE_DOWNLOAD_TAGS_UNSPECIFIED\nGIT_REMOTE_DOWNLOAD_TAGS_AUTO\nGIT_REMOTE_DOWNLOAD_TAGS_NONE\nGIT_REMOTE_DOWNLOAD_TAGS_ALL", + "tdef": "typedef", + "description": " Automatic tag following option", + "comments": "

Lets us select the --tags option to use.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_REMOTE_DOWNLOAD_TAGS_UNSPECIFIED", + "comments": "

Use the setting from the configuration.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_REMOTE_DOWNLOAD_TAGS_AUTO", + "comments": "

Ask the server for tags pointing to objects we're already\n downloading.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_REMOTE_DOWNLOAD_TAGS_NONE", + "comments": "

Don't ask for any tags beyond the refspecs.

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_REMOTE_DOWNLOAD_TAGS_ALL", + "comments": "

Ask for the all the tags.

\n", + "value": 3 + } + ], + "used": { + "returns": [ + "git_remote_autotag" + ], + "needs": [ + "git_remote_set_autotag", + "git_remote_update_tips" + ] + } + } + ], + [ + "git_remote_callbacks", + { + "decl": [ + "unsigned int version", + "git_transport_message_cb sideband_progress", + "int (*)(git_remote_completion_type, void *) completion", + "git_cred_acquire_cb credentials", + "git_transport_certificate_check_cb certificate_check", + "git_transfer_progress_cb transfer_progress", + "int (*)(const char *, const git_oid *, const git_oid *, void *) update_tips", + "git_packbuilder_progress pack_progress", + "git_push_transfer_progress push_transfer_progress", + "int (*)(const char *, const char *, void *) push_update_reference", + "git_push_negotiation push_negotiation", + "git_transport_cb transport", + "void * payload" + ], + "type": "struct", + "value": "git_remote_callbacks", + "file": "remote.h", + "line": 376, + "lineto": 460, + "block": "unsigned int version\ngit_transport_message_cb sideband_progress\nint (*)(git_remote_completion_type, void *) completion\ngit_cred_acquire_cb credentials\ngit_transport_certificate_check_cb certificate_check\ngit_transfer_progress_cb transfer_progress\nint (*)(const char *, const git_oid *, const git_oid *, void *) update_tips\ngit_packbuilder_progress pack_progress\ngit_push_transfer_progress push_transfer_progress\nint (*)(const char *, const char *, void *) push_update_reference\ngit_push_negotiation push_negotiation\ngit_transport_cb transport\nvoid * payload", + "tdef": null, + "description": " The callback settings structure", + "comments": "

Set the callbacks to be called by the remote when informing the user about the progress of the network operations.

\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "git_transport_message_cb", + "name": "sideband_progress", + "comments": " Textual progress from the remote. Text send over the\n progress side-band will be passed to this function (this is\n the 'counting objects' output)." + }, + { + "type": "int (*)(git_remote_completion_type, void *)", + "name": "completion", + "comments": " Completion is called when different parts of the download\n process are done (currently unused)." + }, + { + "type": "git_cred_acquire_cb", + "name": "credentials", + "comments": " This will be called if the remote host requires\n authentication in order to connect to it.\n\n Returning GIT_PASSTHROUGH will make libgit2 behave as\n though this field isn't set." + }, + { + "type": "git_transport_certificate_check_cb", + "name": "certificate_check", + "comments": " If cert verification fails, this will be called to let the\n user make the final decision of whether to allow the\n connection to proceed. Returns 1 to allow the connection, 0\n to disallow it or a negative value to indicate an error." + }, + { + "type": "git_transfer_progress_cb", + "name": "transfer_progress", + "comments": " During the download of new data, this will be regularly\n called with the current count of progress done by the\n indexer." + }, + { + "type": "int (*)(const char *, const git_oid *, const git_oid *, void *)", + "name": "update_tips", + "comments": " Each time a reference is updated locally, this function\n will be called with information about it." + }, + { + "type": "git_packbuilder_progress", + "name": "pack_progress", + "comments": " Function to call with progress information during pack\n building. Be aware that this is called inline with pack\n building operations, so performance may be affected." 
+ }, + { + "type": "git_push_transfer_progress", + "name": "push_transfer_progress", + "comments": " Function to call with progress information during the\n upload portion of a push. Be aware that this is called\n inline with pack building operations, so performance may be\n affected." + }, + { + "type": "int (*)(const char *, const char *, void *)", + "name": "push_update_reference", + "comments": " Called for each updated reference on push. If `status` is\n not `NULL`, the update was rejected by the remote server\n and `status` contains the reason given." + }, + { + "type": "git_push_negotiation", + "name": "push_negotiation", + "comments": " Called once between the negotiation step and the upload. It\n provides information about what updates will be performed." + }, + { + "type": "git_transport_cb", + "name": "transport", + "comments": " Create the transport to use for this operation. Leave NULL\n to auto-detect." + }, + { + "type": "void *", + "name": "payload", + "comments": " This will be passed to each of the callbacks in this struct\n as the last parameter." 
+ } + ], + "used": { + "returns": [], + "needs": [ + "git_remote_connect", + "git_remote_init_callbacks", + "git_remote_prune", + "git_remote_update_tips" + ] + } + } + ], + [ + "git_remote_completion_type", + { + "decl": [ + "GIT_REMOTE_COMPLETION_DOWNLOAD", + "GIT_REMOTE_COMPLETION_INDEXING", + "GIT_REMOTE_COMPLETION_ERROR" + ], + "type": "enum", + "file": "remote.h", + "line": 328, + "lineto": 332, + "block": "GIT_REMOTE_COMPLETION_DOWNLOAD\nGIT_REMOTE_COMPLETION_INDEXING\nGIT_REMOTE_COMPLETION_ERROR\nGIT_REMOTE_COMPLETION_DOWNLOAD\nGIT_REMOTE_COMPLETION_INDEXING\nGIT_REMOTE_COMPLETION_ERROR", + "tdef": "typedef", + "description": " Argument to the completion callback which tells it which operation\n finished.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_REMOTE_COMPLETION_DOWNLOAD", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_REMOTE_COMPLETION_INDEXING", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_REMOTE_COMPLETION_ERROR", + "comments": "", + "value": 2 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_remote_head", + { + "decl": [ + "int local", + "git_oid oid", + "git_oid loid", + "char * name", + "char * symref_target" + ], + "type": "struct", + "value": "git_remote_head", + "file": "net.h", + "line": 40, + "lineto": 50, + "block": "int local\ngit_oid oid\ngit_oid loid\nchar * name\nchar * symref_target", + "tdef": null, + "description": " Description of a reference advertised by a remote server, given out\n on `ls` calls.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "local", + "comments": "" + }, + { + "type": "git_oid", + "name": "oid", + "comments": "" + }, + { + "type": "git_oid", + "name": "loid", + "comments": "" + }, + { + "type": "char *", + "name": "name", + "comments": "" + }, + { + "type": "char *", + "name": "symref_target", + "comments": " If the server send a symref mapping for this ref, this will\n point to the target." 
+ } + ], + "used": { + "returns": [], + "needs": [ + "git_headlist_cb", + "git_remote_ls" + ] + } + } + ], + [ + "git_repository", + { + "decl": "git_repository", + "type": "struct", + "value": "git_repository", + "file": "types.h", + "line": 105, + "lineto": 105, + "tdef": "typedef", + "description": " Representation of an existing git repository,\n including all its object contents", + "comments": "", + "used": { + "returns": [ + "git_blob_owner", + "git_commit_owner", + "git_filter_source_repo", + "git_index_owner", + "git_object_owner", + "git_reference_owner", + "git_remote_owner", + "git_revwalk_repository", + "git_submodule_owner", + "git_tag_owner", + "git_tree_owner" + ], + "needs": [ + "git_annotated_commit_from_fetchhead", + "git_annotated_commit_from_ref", + "git_annotated_commit_from_revspec", + "git_annotated_commit_lookup", + "git_attr_add_macro", + "git_attr_cache_flush", + "git_attr_foreach", + "git_attr_get", + "git_attr_get_many", + "git_blame_file", + "git_blob_create_frombuffer", + "git_blob_create_fromchunks", + "git_blob_create_fromdisk", + "git_blob_create_fromstream", + "git_blob_create_fromworkdir", + "git_blob_lookup", + "git_blob_lookup_prefix", + "git_branch_create", + "git_branch_create_from_annotated", + "git_branch_iterator_new", + "git_branch_lookup", + "git_checkout_head", + "git_checkout_index", + "git_checkout_tree", + "git_cherrypick", + "git_cherrypick_commit", + "git_clone", + "git_commit_create", + "git_commit_create_buffer", + "git_commit_create_from_callback", + "git_commit_create_v", + "git_commit_create_with_signature", + "git_commit_extract_signature", + "git_commit_lookup", + "git_commit_lookup_prefix", + "git_describe_workdir", + "git_diff_commit_as_email", + "git_diff_index_to_index", + "git_diff_index_to_workdir", + "git_diff_tree_to_index", + "git_diff_tree_to_tree", + "git_diff_tree_to_workdir", + "git_diff_tree_to_workdir_with_index", + "git_filter_list_apply_to_file", + "git_filter_list_load", + 
"git_filter_list_new", + "git_filter_list_stream_file", + "git_graph_ahead_behind", + "git_graph_descendant_of", + "git_ignore_add_rule", + "git_ignore_clear_internal_rules", + "git_ignore_path_is_ignored", + "git_index_write_tree_to", + "git_merge", + "git_merge_analysis", + "git_merge_base", + "git_merge_base_many", + "git_merge_base_octopus", + "git_merge_bases", + "git_merge_bases_many", + "git_merge_commits", + "git_merge_file_from_index", + "git_merge_trees", + "git_note_create", + "git_note_foreach", + "git_note_iterator_new", + "git_note_read", + "git_note_remove", + "git_object_lookup", + "git_object_lookup_prefix", + "git_packbuilder_new", + "git_pathspec_match_workdir", + "git_rebase_init", + "git_rebase_open", + "git_refdb_backend_fs", + "git_refdb_new", + "git_refdb_open", + "git_reference_create", + "git_reference_create_matching", + "git_reference_dwim", + "git_reference_ensure_log", + "git_reference_foreach", + "git_reference_foreach_glob", + "git_reference_foreach_name", + "git_reference_has_log", + "git_reference_iterator_glob_new", + "git_reference_iterator_new", + "git_reference_list", + "git_reference_lookup", + "git_reference_name_to_id", + "git_reference_remove", + "git_reference_symbolic_create", + "git_reference_symbolic_create_matching", + "git_reflog_delete", + "git_reflog_read", + "git_reflog_rename", + "git_remote_add_fetch", + "git_remote_add_push", + "git_remote_create", + "git_remote_create_anonymous", + "git_remote_create_cb", + "git_remote_create_with_fetchspec", + "git_remote_delete", + "git_remote_list", + "git_remote_lookup", + "git_remote_rename", + "git_remote_set_autotag", + "git_remote_set_pushurl", + "git_remote_set_url", + "git_repository__cleanup", + "git_repository_config", + "git_repository_config_snapshot", + "git_repository_create_cb", + "git_repository_detach_head", + "git_repository_fetchhead_foreach", + "git_repository_free", + "git_repository_get_namespace", + "git_repository_hashfile", + "git_repository_head", + 
"git_repository_head_detached", + "git_repository_head_unborn", + "git_repository_ident", + "git_repository_index", + "git_repository_init", + "git_repository_init_ext", + "git_repository_init_init_options", + "git_repository_is_bare", + "git_repository_is_empty", + "git_repository_is_shallow", + "git_repository_mergehead_foreach", + "git_repository_message", + "git_repository_message_remove", + "git_repository_new", + "git_repository_odb", + "git_repository_open", + "git_repository_open_bare", + "git_repository_open_ext", + "git_repository_path", + "git_repository_refdb", + "git_repository_reinit_filesystem", + "git_repository_set_bare", + "git_repository_set_config", + "git_repository_set_head", + "git_repository_set_head_detached", + "git_repository_set_head_detached_from_annotated", + "git_repository_set_ident", + "git_repository_set_index", + "git_repository_set_namespace", + "git_repository_set_odb", + "git_repository_set_refdb", + "git_repository_set_workdir", + "git_repository_state", + "git_repository_state_cleanup", + "git_repository_workdir", + "git_repository_wrap_odb", + "git_reset", + "git_reset_default", + "git_reset_from_annotated", + "git_revert", + "git_revert_commit", + "git_revparse", + "git_revparse_ext", + "git_revparse_single", + "git_revwalk_new", + "git_signature_default", + "git_stash_apply", + "git_stash_drop", + "git_stash_foreach", + "git_stash_pop", + "git_status_file", + "git_status_foreach", + "git_status_foreach_ext", + "git_status_list_new", + "git_status_should_ignore", + "git_submodule_add_setup", + "git_submodule_foreach", + "git_submodule_lookup", + "git_submodule_open", + "git_submodule_repo_init", + "git_submodule_resolve_url", + "git_submodule_set_branch", + "git_submodule_set_fetch_recurse_submodules", + "git_submodule_set_ignore", + "git_submodule_set_update", + "git_submodule_set_url", + "git_submodule_status", + "git_tag_annotation_create", + "git_tag_create", + "git_tag_create_frombuffer", + 
"git_tag_create_lightweight", + "git_tag_delete", + "git_tag_foreach", + "git_tag_list", + "git_tag_list_match", + "git_tag_lookup", + "git_tag_lookup_prefix", + "git_tree_entry_to_object", + "git_tree_lookup", + "git_tree_lookup_prefix", + "git_treebuilder_new" + ] + } + } + ], + [ + "git_repository_init_flag_t", + { + "decl": [ + "GIT_REPOSITORY_INIT_BARE", + "GIT_REPOSITORY_INIT_NO_REINIT", + "GIT_REPOSITORY_INIT_NO_DOTGIT_DIR", + "GIT_REPOSITORY_INIT_MKDIR", + "GIT_REPOSITORY_INIT_MKPATH", + "GIT_REPOSITORY_INIT_EXTERNAL_TEMPLATE", + "GIT_REPOSITORY_INIT_RELATIVE_GITLINK" + ], + "type": "enum", + "file": "repository.h", + "line": 202, + "lineto": 210, + "block": "GIT_REPOSITORY_INIT_BARE\nGIT_REPOSITORY_INIT_NO_REINIT\nGIT_REPOSITORY_INIT_NO_DOTGIT_DIR\nGIT_REPOSITORY_INIT_MKDIR\nGIT_REPOSITORY_INIT_MKPATH\nGIT_REPOSITORY_INIT_EXTERNAL_TEMPLATE\nGIT_REPOSITORY_INIT_RELATIVE_GITLINK", + "tdef": "typedef", + "description": " Option flags for `git_repository_init_ext`.", + "comments": "

These flags configure extra behaviors to git_repository_init_ext. In every case, the default behavior is the zero value (i.e. flag is not set). Just OR the flag values together for the flags parameter when initializing a new repo. Details of individual values are:

\n\n
    \n
  • BARE - Create a bare repository with no working directory. * NO_REINIT - Return an GIT_EEXISTS error if the repo_path appears to already be an git repository. * NO_DOTGIT_DIR - Normally a "/.git/" will be appended to the repo path for non-bare repos (if it is not already there), but passing this flag prevents that behavior. * MKDIR - Make the repo_path (and workdir_path) as needed. Init is always willing to create the ".git" directory even without this flag. This flag tells init to create the trailing component of the repo and workdir paths as needed. * MKPATH - Recursively make all components of the repo and workdir paths as necessary. * EXTERNAL_TEMPLATE - libgit2 normally uses internal templates to initialize a new repo. This flags enables external templates, looking the "template_path" from the options if set, or the init.templatedir global config if not, or falling back on "/usr/share/git-core/templates" if it exists. * GIT_REPOSITORY_INIT_RELATIVE_GITLINK - If an alternate workdir is specified, use relative paths for the gitdir and core.worktree.
  • \n
\n", + "fields": [ + { + "type": "int", + "name": "GIT_REPOSITORY_INIT_BARE", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_INIT_NO_REINIT", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_INIT_NO_DOTGIT_DIR", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_INIT_MKDIR", + "comments": "", + "value": 8 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_INIT_MKPATH", + "comments": "", + "value": 16 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_INIT_EXTERNAL_TEMPLATE", + "comments": "", + "value": 32 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_INIT_RELATIVE_GITLINK", + "comments": "", + "value": 64 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_repository_init_mode_t", + { + "decl": [ + "GIT_REPOSITORY_INIT_SHARED_UMASK", + "GIT_REPOSITORY_INIT_SHARED_GROUP", + "GIT_REPOSITORY_INIT_SHARED_ALL" + ], + "type": "enum", + "file": "repository.h", + "line": 225, + "lineto": 229, + "block": "GIT_REPOSITORY_INIT_SHARED_UMASK\nGIT_REPOSITORY_INIT_SHARED_GROUP\nGIT_REPOSITORY_INIT_SHARED_ALL", + "tdef": "typedef", + "description": " Mode options for `git_repository_init_ext`.", + "comments": "

Set the mode field of the git_repository_init_options structure either to the custom mode that you would like, or to one of the following modes:

\n\n
    \n
  • SHARED_UMASK - Use permissions configured by umask - the default. * SHARED_GROUP - Use "--shared=group" behavior, chmod'ing the new repo to be group writable and "g+sx" for sticky group assignment. * SHARED_ALL - Use "--shared=all" behavior, adding world readability. * Anything else - Set to custom value.
  • \n
\n", + "fields": [ + { + "type": "int", + "name": "GIT_REPOSITORY_INIT_SHARED_UMASK", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_INIT_SHARED_GROUP", + "comments": "", + "value": 1533 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_INIT_SHARED_ALL", + "comments": "", + "value": 1535 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_repository_init_options", + { + "decl": [ + "unsigned int version", + "uint32_t flags", + "uint32_t mode", + "const char * workdir_path", + "const char * description", + "const char * template_path", + "const char * initial_head", + "const char * origin_url" + ], + "type": "struct", + "value": "git_repository_init_options", + "file": "repository.h", + "line": 259, + "lineto": 268, + "block": "unsigned int version\nuint32_t flags\nuint32_t mode\nconst char * workdir_path\nconst char * description\nconst char * template_path\nconst char * initial_head\nconst char * origin_url", + "tdef": "typedef", + "description": " Extended options structure for `git_repository_init_ext`.", + "comments": "

This contains extra options for git_repository_init_ext that enable additional initialization features. The fields are:

\n\n
    \n
  • flags - Combination of GIT_REPOSITORY_INIT flags above. * mode - Set to one of the standard GIT_REPOSITORY_INIT_SHARED_... constants above, or to a custom value that you would like. * workdir_path - The path to the working dir or NULL for default (i.e. repo_path parent on non-bare repos). IF THIS IS RELATIVE PATH, IT WILL BE EVALUATED RELATIVE TO THE REPO_PATH. If this is not the "natural" working directory, a .git gitlink file will be created here linking to the repo_path. * description - If set, this will be used to initialize the "description" file in the repository, instead of using the template content. * template_path - When GIT_REPOSITORY_INIT_EXTERNAL_TEMPLATE is set, this contains the path to use for the template directory. If this is NULL, the config or default directory options will be used instead. * initial_head - The name of the head to point HEAD at. If NULL, then this will be treated as "master" and the HEAD ref will be set to "refs/heads/master". If this begins with "refs/" it will be used verbatim; otherwise "refs/heads/" will be prefixed. * origin_url - If this is non-NULL, then after the rest of the repository initialization is completed, an "origin" remote will be added pointing to this URL.
  • \n
\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "uint32_t", + "name": "flags", + "comments": "" + }, + { + "type": "uint32_t", + "name": "mode", + "comments": "" + }, + { + "type": "const char *", + "name": "workdir_path", + "comments": "" + }, + { + "type": "const char *", + "name": "description", + "comments": "" + }, + { + "type": "const char *", + "name": "template_path", + "comments": "" + }, + { + "type": "const char *", + "name": "initial_head", + "comments": "" + }, + { + "type": "const char *", + "name": "origin_url", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_repository_init_ext", + "git_repository_init_init_options" + ] + } + } + ], + [ + "git_repository_open_flag_t", + { + "decl": [ + "GIT_REPOSITORY_OPEN_NO_SEARCH", + "GIT_REPOSITORY_OPEN_CROSS_FS", + "GIT_REPOSITORY_OPEN_BARE" + ], + "type": "enum", + "file": "repository.h", + "line": 99, + "lineto": 103, + "block": "GIT_REPOSITORY_OPEN_NO_SEARCH\nGIT_REPOSITORY_OPEN_CROSS_FS\nGIT_REPOSITORY_OPEN_BARE", + "tdef": "typedef", + "description": " Option flags for `git_repository_open_ext`.", + "comments": "
    \n
  • GIT_REPOSITORY_OPEN_NO_SEARCH - Only open the repository if it can be immediately found in the start_path. Do not walk up from the start_path looking at parent directories. * GIT_REPOSITORY_OPEN_CROSS_FS - Unless this flag is set, open will not continue searching across filesystem boundaries (i.e. when st_dev changes from the stat system call). (E.g. Searching in a user's home directory "/home/user/source/" will not return "/.git/" as the found repo if "/" is a different filesystem than "/home".) * GIT_REPOSITORY_OPEN_BARE - Open repository as a bare repo regardless of core.bare config, and defer loading config file for faster setup. Unlike git_repository_open_bare, this can follow gitlinks.
  • \n
\n", + "fields": [ + { + "type": "int", + "name": "GIT_REPOSITORY_OPEN_NO_SEARCH", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_OPEN_CROSS_FS", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_OPEN_BARE", + "comments": "", + "value": 4 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_repository_state_t", + { + "decl": [ + "GIT_REPOSITORY_STATE_NONE", + "GIT_REPOSITORY_STATE_MERGE", + "GIT_REPOSITORY_STATE_REVERT", + "GIT_REPOSITORY_STATE_REVERT_SEQUENCE", + "GIT_REPOSITORY_STATE_CHERRYPICK", + "GIT_REPOSITORY_STATE_CHERRYPICK_SEQUENCE", + "GIT_REPOSITORY_STATE_BISECT", + "GIT_REPOSITORY_STATE_REBASE", + "GIT_REPOSITORY_STATE_REBASE_INTERACTIVE", + "GIT_REPOSITORY_STATE_REBASE_MERGE", + "GIT_REPOSITORY_STATE_APPLY_MAILBOX", + "GIT_REPOSITORY_STATE_APPLY_MAILBOX_OR_REBASE" + ], + "type": "enum", + "file": "repository.h", + "line": 674, + "lineto": 687, + "block": "GIT_REPOSITORY_STATE_NONE\nGIT_REPOSITORY_STATE_MERGE\nGIT_REPOSITORY_STATE_REVERT\nGIT_REPOSITORY_STATE_REVERT_SEQUENCE\nGIT_REPOSITORY_STATE_CHERRYPICK\nGIT_REPOSITORY_STATE_CHERRYPICK_SEQUENCE\nGIT_REPOSITORY_STATE_BISECT\nGIT_REPOSITORY_STATE_REBASE\nGIT_REPOSITORY_STATE_REBASE_INTERACTIVE\nGIT_REPOSITORY_STATE_REBASE_MERGE\nGIT_REPOSITORY_STATE_APPLY_MAILBOX\nGIT_REPOSITORY_STATE_APPLY_MAILBOX_OR_REBASE", + "tdef": "typedef", + "description": " Repository state", + "comments": "

These values represent possible states for the repository to be in, based on the current operation which is ongoing.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_NONE", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_MERGE", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_REVERT", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_REVERT_SEQUENCE", + "comments": "", + "value": 3 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_CHERRYPICK", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_CHERRYPICK_SEQUENCE", + "comments": "", + "value": 5 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_BISECT", + "comments": "", + "value": 6 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_REBASE", + "comments": "", + "value": 7 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_REBASE_INTERACTIVE", + "comments": "", + "value": 8 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_REBASE_MERGE", + "comments": "", + "value": 9 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_APPLY_MAILBOX", + "comments": "", + "value": 10 + }, + { + "type": "int", + "name": "GIT_REPOSITORY_STATE_APPLY_MAILBOX_OR_REBASE", + "comments": "", + "value": 11 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_reset_t", + { + "decl": [ + "GIT_RESET_SOFT", + "GIT_RESET_MIXED", + "GIT_RESET_HARD" + ], + "type": "enum", + "file": "reset.h", + "line": 26, + "lineto": 30, + "block": "GIT_RESET_SOFT\nGIT_RESET_MIXED\nGIT_RESET_HARD", + "tdef": "typedef", + "description": " Kinds of reset operation", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_RESET_SOFT", + "comments": "

Move the head to the given commit

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_RESET_MIXED", + "comments": "

SOFT plus reset index to the commit

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_RESET_HARD", + "comments": "

MIXED plus changes in working tree discarded

\n", + "value": 3 + } + ], + "used": { + "returns": [], + "needs": [ + "git_reset", + "git_reset_from_annotated" + ] + } + } + ], + [ + "git_revert_options", + { + "decl": [ + "unsigned int version", + "unsigned int mainline", + "git_merge_options merge_opts", + "git_checkout_options checkout_opts" + ], + "type": "struct", + "value": "git_revert_options", + "file": "revert.h", + "line": 26, + "lineto": 34, + "block": "unsigned int version\nunsigned int mainline\ngit_merge_options merge_opts\ngit_checkout_options checkout_opts", + "tdef": "typedef", + "description": " Options for revert", + "comments": "", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "unsigned int", + "name": "mainline", + "comments": " For merge commits, the \"mainline\" is treated as the parent. " + }, + { + "type": "git_merge_options", + "name": "merge_opts", + "comments": " Options for the merging " + }, + { + "type": "git_checkout_options", + "name": "checkout_opts", + "comments": " Options for the checkout " + } + ], + "used": { + "returns": [], + "needs": [ + "git_revert", + "git_revert_init_options" + ] + } + } + ], + [ + "git_revparse_mode_t", + { + "decl": [ + "GIT_REVPARSE_SINGLE", + "GIT_REVPARSE_RANGE", + "GIT_REVPARSE_MERGE_BASE" + ], + "type": "enum", + "file": "revparse.h", + "line": 71, + "lineto": 78, + "block": "GIT_REVPARSE_SINGLE\nGIT_REVPARSE_RANGE\nGIT_REVPARSE_MERGE_BASE", + "tdef": "typedef", + "description": " Revparse flags. These indicate the intended behavior of the spec passed to\n git_revparse.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_REVPARSE_SINGLE", + "comments": "

The spec targeted a single object.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_REVPARSE_RANGE", + "comments": "

The spec targeted a range of commits.

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_REVPARSE_MERGE_BASE", + "comments": "

The spec used the '...' operator, which invokes special semantics.

\n", + "value": 4 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_revspec", + { + "decl": [ + "git_object * from", + "git_object * to", + "unsigned int flags" + ], + "type": "struct", + "value": "git_revspec", + "file": "revparse.h", + "line": 83, + "lineto": 90, + "block": "git_object * from\ngit_object * to\nunsigned int flags", + "tdef": "typedef", + "description": " Git Revision Spec: output of a `git_revparse` operation", + "comments": "", + "fields": [ + { + "type": "git_object *", + "name": "from", + "comments": " The left element of the revspec; must be freed by the user " + }, + { + "type": "git_object *", + "name": "to", + "comments": " The right element of the revspec; must be freed by the user " + }, + { + "type": "unsigned int", + "name": "flags", + "comments": " The intent of the revspec (i.e. `git_revparse_mode_t` flags) " + } + ], + "used": { + "returns": [], + "needs": [ + "git_revparse" + ] + } + } + ], + [ + "git_revwalk", + { + "decl": "git_revwalk", + "type": "struct", + "value": "git_revwalk", + "file": "types.h", + "line": 111, + "lineto": 111, + "tdef": "typedef", + "description": " Representation of an in-progress walk through the commits in a repo ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_packbuilder_insert_walk", + "git_revwalk_add_hide_cb", + "git_revwalk_free", + "git_revwalk_hide", + "git_revwalk_hide_glob", + "git_revwalk_hide_head", + "git_revwalk_hide_ref", + "git_revwalk_new", + "git_revwalk_next", + "git_revwalk_push", + "git_revwalk_push_glob", + "git_revwalk_push_head", + "git_revwalk_push_range", + "git_revwalk_push_ref", + "git_revwalk_repository", + "git_revwalk_reset", + "git_revwalk_simplify_first_parent", + "git_revwalk_sorting" + ] + } + } + ], + [ + "git_signature", + { + "decl": [ + "char * name", + "char * email", + "git_time when" + ], + "type": "struct", + "value": "git_signature", + "file": "types.h", + "line": 162, + "lineto": 166, + "block": "char * 
name\nchar * email\ngit_time when", + "tdef": "typedef", + "description": " An action signature (e.g. for committers, taggers, etc) ", + "comments": "", + "fields": [ + { + "type": "char *", + "name": "name", + "comments": " full name of the author " + }, + { + "type": "char *", + "name": "email", + "comments": " email of the author " + }, + { + "type": "git_time", + "name": "when", + "comments": " time when the action happened " + } + ], + "used": { + "returns": [ + "git_commit_author", + "git_commit_committer", + "git_note_author", + "git_note_committer", + "git_reflog_entry_committer", + "git_tag_tagger" + ], + "needs": [ + "git_commit_amend", + "git_commit_create", + "git_commit_create_buffer", + "git_commit_create_from_callback", + "git_commit_create_v", + "git_note_create", + "git_note_remove", + "git_rebase_commit", + "git_rebase_finish", + "git_reflog_append", + "git_signature_default", + "git_signature_dup", + "git_signature_free", + "git_signature_new", + "git_signature_now", + "git_tag_annotation_create", + "git_tag_create" + ] + } + } + ], + [ + "git_smart_subtransport_definition", + { + "decl": [ + "git_smart_subtransport_cb callback", + "unsigned int rpc", + "void * param" + ], + "type": "struct", + "value": "git_smart_subtransport_definition", + "file": "sys/transport.h", + "line": 326, + "lineto": 339, + "block": "git_smart_subtransport_cb callback\nunsigned int rpc\nvoid * param", + "tdef": "typedef", + "description": " Definition for a \"subtransport\"", + "comments": "

This is used to let the smart protocol code know about the protocol which you are implementing.

\n", + "fields": [ + { + "type": "git_smart_subtransport_cb", + "name": "callback", + "comments": " The function to use to create the git_smart_subtransport " + }, + { + "type": "unsigned int", + "name": "rpc", + "comments": " True if the protocol is stateless; false otherwise. For example,\n http:// is stateless, but git:// is not." + }, + { + "type": "void *", + "name": "param", + "comments": " Param of the callback" + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_sort_t", + { + "decl": [ + "GIT_SORT_NONE", + "GIT_SORT_TOPOLOGICAL", + "GIT_SORT_TIME", + "GIT_SORT_REVERSE" + ], + "type": "enum", + "file": "revwalk.h", + "line": 26, + "lineto": 55, + "block": "GIT_SORT_NONE\nGIT_SORT_TOPOLOGICAL\nGIT_SORT_TIME\nGIT_SORT_REVERSE", + "tdef": "typedef", + "description": " Flags to specify the sorting which a revwalk should perform.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_SORT_NONE", + "comments": "

Sort the repository contents in no particular ordering;\n this sorting is arbitrary, implementation-specific\n and subject to change at any time.\n This is the default sorting for new walkers.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_SORT_TOPOLOGICAL", + "comments": "

Sort the repository contents in topological order\n (parents before children); this sorting mode\n can be combined with time sorting.

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_SORT_TIME", + "comments": "

Sort the repository contents by commit time;\n this sorting mode can be combined with\n topological sorting.

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_SORT_REVERSE", + "comments": "

Iterate through the repository contents in reverse\n order; this sorting mode can be combined with\n any of the above.

\n", + "value": 4 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_stash_apply_flags", + { + "decl": [ + "GIT_STASH_APPLY_DEFAULT", + "GIT_STASH_APPLY_REINSTATE_INDEX" + ], + "type": "enum", + "file": "stash.h", + "line": 74, + "lineto": 81, + "block": "GIT_STASH_APPLY_DEFAULT\nGIT_STASH_APPLY_REINSTATE_INDEX", + "tdef": "typedef", + "description": " Stash application flags. ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_STASH_APPLY_DEFAULT", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_STASH_APPLY_REINSTATE_INDEX", + "comments": "", + "value": 1 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_stash_flags", + { + "decl": [ + "GIT_STASH_DEFAULT", + "GIT_STASH_KEEP_INDEX", + "GIT_STASH_INCLUDE_UNTRACKED", + "GIT_STASH_INCLUDE_IGNORED" + ], + "type": "enum", + "file": "stash.h", + "line": 24, + "lineto": 47, + "block": "GIT_STASH_DEFAULT\nGIT_STASH_KEEP_INDEX\nGIT_STASH_INCLUDE_UNTRACKED\nGIT_STASH_INCLUDE_IGNORED", + "tdef": "typedef", + "description": " Stash flags", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_STASH_DEFAULT", + "comments": "

No option, default

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_STASH_KEEP_INDEX", + "comments": "

All changes already added to the index are left intact in\n the working directory

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_STASH_INCLUDE_UNTRACKED", + "comments": "

All untracked files are also stashed and then cleaned up\n from the working directory

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_STASH_INCLUDE_IGNORED", + "comments": "

All ignored files are also stashed and then cleaned up from\n the working directory

\n", + "value": 4 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_status_list", + { + "decl": "git_status_list", + "type": "struct", + "value": "git_status_list", + "file": "types.h", + "line": 184, + "lineto": 184, + "tdef": "typedef", + "description": " Representation of a status collection ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_status_byindex", + "git_status_list_entrycount", + "git_status_list_free", + "git_status_list_get_perfdata", + "git_status_list_new" + ] + } + } + ], + [ + "git_status_opt_t", + { + "decl": [ + "GIT_STATUS_OPT_INCLUDE_UNTRACKED", + "GIT_STATUS_OPT_INCLUDE_IGNORED", + "GIT_STATUS_OPT_INCLUDE_UNMODIFIED", + "GIT_STATUS_OPT_EXCLUDE_SUBMODULES", + "GIT_STATUS_OPT_RECURSE_UNTRACKED_DIRS", + "GIT_STATUS_OPT_DISABLE_PATHSPEC_MATCH", + "GIT_STATUS_OPT_RECURSE_IGNORED_DIRS", + "GIT_STATUS_OPT_RENAMES_HEAD_TO_INDEX", + "GIT_STATUS_OPT_RENAMES_INDEX_TO_WORKDIR", + "GIT_STATUS_OPT_SORT_CASE_SENSITIVELY", + "GIT_STATUS_OPT_SORT_CASE_INSENSITIVELY", + "GIT_STATUS_OPT_RENAMES_FROM_REWRITES", + "GIT_STATUS_OPT_NO_REFRESH", + "GIT_STATUS_OPT_UPDATE_INDEX", + "GIT_STATUS_OPT_INCLUDE_UNREADABLE", + "GIT_STATUS_OPT_INCLUDE_UNREADABLE_AS_UNTRACKED" + ], + "type": "enum", + "file": "status.h", + "line": 137, + "lineto": 154, + "block": "GIT_STATUS_OPT_INCLUDE_UNTRACKED\nGIT_STATUS_OPT_INCLUDE_IGNORED\nGIT_STATUS_OPT_INCLUDE_UNMODIFIED\nGIT_STATUS_OPT_EXCLUDE_SUBMODULES\nGIT_STATUS_OPT_RECURSE_UNTRACKED_DIRS\nGIT_STATUS_OPT_DISABLE_PATHSPEC_MATCH\nGIT_STATUS_OPT_RECURSE_IGNORED_DIRS\nGIT_STATUS_OPT_RENAMES_HEAD_TO_INDEX\nGIT_STATUS_OPT_RENAMES_INDEX_TO_WORKDIR\nGIT_STATUS_OPT_SORT_CASE_SENSITIVELY\nGIT_STATUS_OPT_SORT_CASE_INSENSITIVELY\nGIT_STATUS_OPT_RENAMES_FROM_REWRITES\nGIT_STATUS_OPT_NO_REFRESH\nGIT_STATUS_OPT_UPDATE_INDEX\nGIT_STATUS_OPT_INCLUDE_UNREADABLE\nGIT_STATUS_OPT_INCLUDE_UNREADABLE_AS_UNTRACKED", + "tdef": "typedef", + "description": " Flags to control status callbacks", + "comments": "
    \n
  • GIT_STATUS_OPT_INCLUDE_UNTRACKED says that callbacks should be made on untracked files. These will only be made if the workdir files are included in the status "show" option. - GIT_STATUS_OPT_INCLUDE_IGNORED says that ignored files get callbacks. Again, these callbacks will only be made if the workdir files are included in the status "show" option. - GIT_STATUS_OPT_INCLUDE_UNMODIFIED indicates that callback should be made even on unmodified files. - GIT_STATUS_OPT_EXCLUDE_SUBMODULES indicates that submodules should be skipped. This only applies if there are no pending typechanges to the submodule (either from or to another type). - GIT_STATUS_OPT_RECURSE_UNTRACKED_DIRS indicates that all files in untracked directories should be included. Normally if an entire directory is new, then just the top-level directory is included (with a trailing slash on the entry name). This flag says to include all of the individual files in the directory instead. - GIT_STATUS_OPT_DISABLE_PATHSPEC_MATCH indicates that the given path should be treated as a literal path, and not as a pathspec pattern. - GIT_STATUS_OPT_RECURSE_IGNORED_DIRS indicates that the contents of ignored directories should be included in the status. This is like doing git ls-files -o -i --exclude-standard with core git. - GIT_STATUS_OPT_RENAMES_HEAD_TO_INDEX indicates that rename detection should be processed between the head and the index and enables the GIT_STATUS_INDEX_RENAMED as a possible status flag. - GIT_STATUS_OPT_RENAMES_INDEX_TO_WORKDIR indicates that rename detection should be run between the index and the working directory and enabled GIT_STATUS_WT_RENAMED as a possible status flag. 
- GIT_STATUS_OPT_SORT_CASE_SENSITIVELY overrides the native case sensitivity for the file system and forces the output to be in case-sensitive order - GIT_STATUS_OPT_SORT_CASE_INSENSITIVELY overrides the native case sensitivity for the file system and forces the output to be in case-insensitive order - GIT_STATUS_OPT_RENAMES_FROM_REWRITES indicates that rename detection should include rewritten files - GIT_STATUS_OPT_NO_REFRESH bypasses the default status behavior of doing a "soft" index reload (i.e. reloading the index data if the file on disk has been modified outside libgit2). - GIT_STATUS_OPT_UPDATE_INDEX tells libgit2 to refresh the stat cache in the index for files that are unchanged but have out of date stat information in the index. It will result in less work being done on subsequent calls to get status. This is mutually exclusive with the NO_REFRESH option.
  • \n
\n\n

Calling git_status_foreach() is like calling the extended version with: GIT_STATUS_OPT_INCLUDE_IGNORED, GIT_STATUS_OPT_INCLUDE_UNTRACKED, and GIT_STATUS_OPT_RECURSE_UNTRACKED_DIRS. Those options are bundled together as GIT_STATUS_OPT_DEFAULTS if you want them as a baseline.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_STATUS_OPT_INCLUDE_UNTRACKED", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_INCLUDE_IGNORED", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_INCLUDE_UNMODIFIED", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_EXCLUDE_SUBMODULES", + "comments": "", + "value": 8 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_RECURSE_UNTRACKED_DIRS", + "comments": "", + "value": 16 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_DISABLE_PATHSPEC_MATCH", + "comments": "", + "value": 32 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_RECURSE_IGNORED_DIRS", + "comments": "", + "value": 64 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_RENAMES_HEAD_TO_INDEX", + "comments": "", + "value": 128 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_RENAMES_INDEX_TO_WORKDIR", + "comments": "", + "value": 256 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_SORT_CASE_SENSITIVELY", + "comments": "", + "value": 512 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_SORT_CASE_INSENSITIVELY", + "comments": "", + "value": 1024 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_RENAMES_FROM_REWRITES", + "comments": "", + "value": 2048 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_NO_REFRESH", + "comments": "", + "value": 4096 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_UPDATE_INDEX", + "comments": "", + "value": 8192 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_INCLUDE_UNREADABLE", + "comments": "", + "value": 16384 + }, + { + "type": "int", + "name": "GIT_STATUS_OPT_INCLUDE_UNREADABLE_AS_UNTRACKED", + "comments": "", + "value": 32768 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_status_show_t", + { + "decl": [ + "GIT_STATUS_SHOW_INDEX_AND_WORKDIR", + "GIT_STATUS_SHOW_INDEX_ONLY", + "GIT_STATUS_SHOW_WORKDIR_ONLY" + ], + "type": "enum", + "file": "status.h", + "line": 
79, + "lineto": 83, + "block": "GIT_STATUS_SHOW_INDEX_AND_WORKDIR\nGIT_STATUS_SHOW_INDEX_ONLY\nGIT_STATUS_SHOW_WORKDIR_ONLY", + "tdef": "typedef", + "description": " Select the files on which to report status.", + "comments": "

With git_status_foreach_ext, this will control which changes get callbacks. With git_status_list_new, these will control which changes are included in the list.

\n\n
    \n
  • GIT_STATUS_SHOW_INDEX_AND_WORKDIR is the default. This roughly matches git status --porcelain regarding which files are included and in what order. - GIT_STATUS_SHOW_INDEX_ONLY only gives status based on HEAD to index comparison, not looking at working directory changes. - GIT_STATUS_SHOW_WORKDIR_ONLY only gives status based on index to working directory comparison, not comparing the index to the HEAD.
  • \n
\n", + "fields": [ + { + "type": "int", + "name": "GIT_STATUS_SHOW_INDEX_AND_WORKDIR", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_STATUS_SHOW_INDEX_ONLY", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_STATUS_SHOW_WORKDIR_ONLY", + "comments": "", + "value": 2 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_status_t", + { + "decl": [ + "GIT_STATUS_CURRENT", + "GIT_STATUS_INDEX_NEW", + "GIT_STATUS_INDEX_MODIFIED", + "GIT_STATUS_INDEX_DELETED", + "GIT_STATUS_INDEX_RENAMED", + "GIT_STATUS_INDEX_TYPECHANGE", + "GIT_STATUS_WT_NEW", + "GIT_STATUS_WT_MODIFIED", + "GIT_STATUS_WT_DELETED", + "GIT_STATUS_WT_TYPECHANGE", + "GIT_STATUS_WT_RENAMED", + "GIT_STATUS_WT_UNREADABLE", + "GIT_STATUS_IGNORED", + "GIT_STATUS_CONFLICTED" + ], + "type": "enum", + "file": "status.h", + "line": 32, + "lineto": 50, + "block": "GIT_STATUS_CURRENT\nGIT_STATUS_INDEX_NEW\nGIT_STATUS_INDEX_MODIFIED\nGIT_STATUS_INDEX_DELETED\nGIT_STATUS_INDEX_RENAMED\nGIT_STATUS_INDEX_TYPECHANGE\nGIT_STATUS_WT_NEW\nGIT_STATUS_WT_MODIFIED\nGIT_STATUS_WT_DELETED\nGIT_STATUS_WT_TYPECHANGE\nGIT_STATUS_WT_RENAMED\nGIT_STATUS_WT_UNREADABLE\nGIT_STATUS_IGNORED\nGIT_STATUS_CONFLICTED", + "tdef": "typedef", + "description": " Status flags for a single file.", + "comments": "

A combination of these values will be returned to indicate the status of a file. Status compares the working directory, the index, and the current HEAD of the repository. The GIT_STATUS_INDEX set of flags represents the status of file in the index relative to the HEAD, and the GIT_STATUS_WT set of flags represent the status of the file in the working directory relative to the index.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_STATUS_CURRENT", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_STATUS_INDEX_NEW", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_STATUS_INDEX_MODIFIED", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_STATUS_INDEX_DELETED", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_STATUS_INDEX_RENAMED", + "comments": "", + "value": 8 + }, + { + "type": "int", + "name": "GIT_STATUS_INDEX_TYPECHANGE", + "comments": "", + "value": 16 + }, + { + "type": "int", + "name": "GIT_STATUS_WT_NEW", + "comments": "", + "value": 128 + }, + { + "type": "int", + "name": "GIT_STATUS_WT_MODIFIED", + "comments": "", + "value": 256 + }, + { + "type": "int", + "name": "GIT_STATUS_WT_DELETED", + "comments": "", + "value": 512 + }, + { + "type": "int", + "name": "GIT_STATUS_WT_TYPECHANGE", + "comments": "", + "value": 1024 + }, + { + "type": "int", + "name": "GIT_STATUS_WT_RENAMED", + "comments": "", + "value": 2048 + }, + { + "type": "int", + "name": "GIT_STATUS_WT_UNREADABLE", + "comments": "", + "value": 4096 + }, + { + "type": "int", + "name": "GIT_STATUS_IGNORED", + "comments": "", + "value": 16384 + }, + { + "type": "int", + "name": "GIT_STATUS_CONFLICTED", + "comments": "", + "value": 32768 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_strarray", + { + "decl": [ + "char ** strings", + "size_t count" + ], + "type": "struct", + "value": "git_strarray", + "file": "strarray.h", + "line": 22, + "lineto": 25, + "block": "char ** strings\nsize_t count", + "tdef": "typedef", + "description": " Array of strings ", + "comments": "", + "fields": [ + { + "type": "char **", + "name": "strings", + "comments": "" + }, + { + "type": "size_t", + "name": "count", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_index_add_all", + "git_index_remove_all", + "git_index_update_all", + "git_pathspec_new", + 
"git_reference_list", + "git_remote_connect", + "git_remote_download", + "git_remote_fetch", + "git_remote_get_fetch_refspecs", + "git_remote_get_push_refspecs", + "git_remote_list", + "git_remote_push", + "git_remote_rename", + "git_remote_upload", + "git_reset_default", + "git_strarray_copy", + "git_strarray_free", + "git_tag_list", + "git_tag_list_match" + ] + } + } + ], + [ + "git_stream", + { + "decl": [ + "int version", + "int encrypted", + "int proxy_support", + "int (*)(struct git_stream *) connect", + "int (*)(git_cert **, struct git_stream *) certificate", + "int (*)(struct git_stream *, const int *) set_proxy", + "ssize_t (*)(struct git_stream *, void *, int) read", + "ssize_t (*)(struct git_stream *, const char *, int, int) write", + "int (*)(struct git_stream *) close", + "void (*)(struct git_stream *) free" + ], + "type": "struct", + "value": "git_stream", + "file": "sys/stream.h", + "line": 29, + "lineto": 41, + "block": "int version\nint encrypted\nint proxy_support\nint (*)(struct git_stream *) connect\nint (*)(git_cert **, struct git_stream *) certificate\nint (*)(struct git_stream *, const int *) set_proxy\nssize_t (*)(struct git_stream *, void *, int) read\nssize_t (*)(struct git_stream *, const char *, int, int) write\nint (*)(struct git_stream *) close\nvoid (*)(struct git_stream *) free", + "tdef": "typedef", + "description": " Every stream must have this struct as its first element, so the\n API can talk to it. You'd define your stream as", + "comments": "
 struct my_stream {             git_stream parent;             ...     }\n
\n\n

and fill the functions

\n", + "fields": [ + { + "type": "int", + "name": "version", + "comments": "" + }, + { + "type": "int", + "name": "encrypted", + "comments": "" + }, + { + "type": "int", + "name": "proxy_support", + "comments": "" + }, + { + "type": "int (*)(struct git_stream *)", + "name": "connect", + "comments": "" + }, + { + "type": "int (*)(git_cert **, struct git_stream *)", + "name": "certificate", + "comments": "" + }, + { + "type": "int (*)(struct git_stream *, const int *)", + "name": "set_proxy", + "comments": "" + }, + { + "type": "ssize_t (*)(struct git_stream *, void *, int)", + "name": "read", + "comments": "" + }, + { + "type": "ssize_t (*)(struct git_stream *, const char *, int, int)", + "name": "write", + "comments": "" + }, + { + "type": "int (*)(struct git_stream *)", + "name": "close", + "comments": "" + }, + { + "type": "void (*)(struct git_stream *)", + "name": "free", + "comments": "" + } + ], + "used": { + "returns": [], + "needs": [ + "git_stream_register_tls" + ] + } + } + ], + [ + "git_submodule", + { + "decl": "git_submodule", + "type": "struct", + "value": "git_submodule", + "file": "types.h", + "line": 335, + "lineto": 335, + "tdef": "typedef", + "description": " Opaque structure representing a submodule.", + "comments": "", + "used": { + "returns": [ + "git_submodule_fetch_recurse_submodules", + "git_submodule_ignore", + "git_submodule_update_strategy" + ], + "needs": [ + "git_submodule_add_finalize", + "git_submodule_add_setup", + "git_submodule_add_to_index", + "git_submodule_branch", + "git_submodule_cb", + "git_submodule_fetch_recurse_submodules", + "git_submodule_foreach", + "git_submodule_free", + "git_submodule_head_id", + "git_submodule_ignore", + "git_submodule_index_id", + "git_submodule_init", + "git_submodule_location", + "git_submodule_lookup", + "git_submodule_name", + "git_submodule_open", + "git_submodule_owner", + "git_submodule_path", + "git_submodule_reload", + "git_submodule_repo_init", + 
"git_submodule_set_fetch_recurse_submodules", + "git_submodule_set_ignore", + "git_submodule_set_update", + "git_submodule_status", + "git_submodule_sync", + "git_submodule_update", + "git_submodule_update_init_options", + "git_submodule_update_strategy", + "git_submodule_url", + "git_submodule_wd_id" + ] + } + } + ], + [ + "git_submodule_ignore_t", + { + "decl": [ + "GIT_SUBMODULE_IGNORE_UNSPECIFIED", + "GIT_SUBMODULE_IGNORE_NONE", + "GIT_SUBMODULE_IGNORE_UNTRACKED", + "GIT_SUBMODULE_IGNORE_DIRTY", + "GIT_SUBMODULE_IGNORE_ALL" + ], + "type": "enum", + "file": "types.h", + "line": 399, + "lineto": 406, + "block": "GIT_SUBMODULE_IGNORE_UNSPECIFIED\nGIT_SUBMODULE_IGNORE_NONE\nGIT_SUBMODULE_IGNORE_UNTRACKED\nGIT_SUBMODULE_IGNORE_DIRTY\nGIT_SUBMODULE_IGNORE_ALL", + "tdef": "typedef", + "description": " Submodule ignore values", + "comments": "

These values represent settings for the submodule.$name.ignore configuration value which says how deeply to look at the working directory when getting submodule status.

\n\n

You can override this value in memory on a per-submodule basis with git_submodule_set_ignore() and can write the changed value to disk with git_submodule_save(). If you have overwritten the value, you can revert to the on disk value by using GIT_SUBMODULE_IGNORE_RESET.

\n\n

The values are:

\n\n
    \n
  • GIT_SUBMODULE_IGNORE_UNSPECIFIED: use the submodule's configuration - GIT_SUBMODULE_IGNORE_NONE: don't ignore any change - i.e. even an untracked file, will mark the submodule as dirty. Ignored files are still ignored, of course. - GIT_SUBMODULE_IGNORE_UNTRACKED: ignore untracked files; only changes to tracked files, or the index or the HEAD commit will matter. - GIT_SUBMODULE_IGNORE_DIRTY: ignore changes in the working directory, only considering changes if the HEAD of submodule has moved from the value in the superproject. - GIT_SUBMODULE_IGNORE_ALL: never check if the submodule is dirty - GIT_SUBMODULE_IGNORE_DEFAULT: not used except as static initializer when we don't want any particular ignore rule to be specified.
  • \n
\n", + "fields": [ + { + "type": "int", + "name": "GIT_SUBMODULE_IGNORE_UNSPECIFIED", + "comments": "

use the submodule's configuration

\n", + "value": -1 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_IGNORE_NONE", + "comments": "

any change or untracked == dirty

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_IGNORE_UNTRACKED", + "comments": "

dirty if tracked files change

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_IGNORE_DIRTY", + "comments": "

only dirty if HEAD moved

\n", + "value": 3 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_IGNORE_ALL", + "comments": "

never dirty

\n", + "value": 4 + } + ], + "used": { + "returns": [ + "git_submodule_ignore" + ], + "needs": [ + "git_submodule_set_ignore", + "git_submodule_status" + ] + } + } + ], + [ + "git_submodule_recurse_t", + { + "decl": [ + "GIT_SUBMODULE_RECURSE_NO", + "GIT_SUBMODULE_RECURSE_YES", + "GIT_SUBMODULE_RECURSE_ONDEMAND" + ], + "type": "enum", + "file": "types.h", + "line": 418, + "lineto": 422, + "block": "GIT_SUBMODULE_RECURSE_NO\nGIT_SUBMODULE_RECURSE_YES\nGIT_SUBMODULE_RECURSE_ONDEMAND", + "tdef": "typedef", + "description": " Options for submodule recurse.", + "comments": "

Represent the value of submodule.$name.fetchRecurseSubmodules

\n\n
    \n
  • GIT_SUBMODULE_RECURSE_NO - do no recurse into submodules * GIT_SUBMODULE_RECURSE_YES - recurse into submodules * GIT_SUBMODULE_RECURSE_ONDEMAND - recurse into submodules only when commit not already in local clone
  • \n
\n", + "fields": [ + { + "type": "int", + "name": "GIT_SUBMODULE_RECURSE_NO", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_RECURSE_YES", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_RECURSE_ONDEMAND", + "comments": "", + "value": 2 + } + ], + "used": { + "returns": [ + "git_submodule_fetch_recurse_submodules" + ], + "needs": [ + "git_submodule_set_fetch_recurse_submodules" + ] + } + } + ], + [ + "git_submodule_status_t", + { + "decl": [ + "GIT_SUBMODULE_STATUS_IN_HEAD", + "GIT_SUBMODULE_STATUS_IN_INDEX", + "GIT_SUBMODULE_STATUS_IN_CONFIG", + "GIT_SUBMODULE_STATUS_IN_WD", + "GIT_SUBMODULE_STATUS_INDEX_ADDED", + "GIT_SUBMODULE_STATUS_INDEX_DELETED", + "GIT_SUBMODULE_STATUS_INDEX_MODIFIED", + "GIT_SUBMODULE_STATUS_WD_UNINITIALIZED", + "GIT_SUBMODULE_STATUS_WD_ADDED", + "GIT_SUBMODULE_STATUS_WD_DELETED", + "GIT_SUBMODULE_STATUS_WD_MODIFIED", + "GIT_SUBMODULE_STATUS_WD_INDEX_MODIFIED", + "GIT_SUBMODULE_STATUS_WD_WD_MODIFIED", + "GIT_SUBMODULE_STATUS_WD_UNTRACKED" + ], + "type": "enum", + "file": "submodule.h", + "line": 74, + "lineto": 89, + "block": "GIT_SUBMODULE_STATUS_IN_HEAD\nGIT_SUBMODULE_STATUS_IN_INDEX\nGIT_SUBMODULE_STATUS_IN_CONFIG\nGIT_SUBMODULE_STATUS_IN_WD\nGIT_SUBMODULE_STATUS_INDEX_ADDED\nGIT_SUBMODULE_STATUS_INDEX_DELETED\nGIT_SUBMODULE_STATUS_INDEX_MODIFIED\nGIT_SUBMODULE_STATUS_WD_UNINITIALIZED\nGIT_SUBMODULE_STATUS_WD_ADDED\nGIT_SUBMODULE_STATUS_WD_DELETED\nGIT_SUBMODULE_STATUS_WD_MODIFIED\nGIT_SUBMODULE_STATUS_WD_INDEX_MODIFIED\nGIT_SUBMODULE_STATUS_WD_WD_MODIFIED\nGIT_SUBMODULE_STATUS_WD_UNTRACKED", + "tdef": "typedef", + "description": " Return codes for submodule status.", + "comments": "

A combination of these flags will be returned to describe the status of a submodule. Depending on the "ignore" property of the submodule, some of the flags may never be returned because they indicate changes that are supposed to be ignored.

\n\n

Submodule info is contained in 4 places: the HEAD tree, the index, config files (both .git/config and .gitmodules), and the working directory. Any or all of those places might be missing information about the submodule depending on what state the repo is in. We consider all four places to build the combination of status flags.

\n\n

There are four values that are not really status, but give basic info about what sources of submodule data are available. These will be returned even if ignore is set to "ALL".

\n\n
    \n
  • IN_HEAD - superproject head contains submodule * IN_INDEX - superproject index contains submodule * IN_CONFIG - superproject gitmodules has submodule * IN_WD - superproject workdir has submodule
  • \n
\n\n

The following values will be returned so long as ignore is not "ALL".

\n\n
    \n
  • INDEX_ADDED - in index, not in head * INDEX_DELETED - in head, not in index * INDEX_MODIFIED - index and head don't match * WD_UNINITIALIZED - workdir contains empty directory * WD_ADDED - in workdir, not index * WD_DELETED - in index, not workdir * WD_MODIFIED - index and workdir head don't match
  • \n
\n\n

The following can only be returned if ignore is "NONE" or "UNTRACKED".

\n\n
    \n
  • WD_INDEX_MODIFIED - submodule workdir index is dirty * WD_WD_MODIFIED - submodule workdir has modified files
  • \n
\n\n

Lastly, the following will only be returned for ignore "NONE".

\n\n
    \n
  • WD_UNTRACKED - wd contains untracked files
  • \n
\n", + "fields": [ + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_IN_HEAD", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_IN_INDEX", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_IN_CONFIG", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_IN_WD", + "comments": "", + "value": 8 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_INDEX_ADDED", + "comments": "", + "value": 16 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_INDEX_DELETED", + "comments": "", + "value": 32 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_INDEX_MODIFIED", + "comments": "", + "value": 64 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_WD_UNINITIALIZED", + "comments": "", + "value": 128 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_WD_ADDED", + "comments": "", + "value": 256 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_WD_DELETED", + "comments": "", + "value": 512 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_WD_MODIFIED", + "comments": "", + "value": 1024 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_WD_INDEX_MODIFIED", + "comments": "", + "value": 2048 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_WD_WD_MODIFIED", + "comments": "", + "value": 4096 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_STATUS_WD_UNTRACKED", + "comments": "", + "value": 8192 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_submodule_update_options", + { + "decl": [ + "unsigned int version", + "git_checkout_options checkout_opts", + "git_fetch_options fetch_opts", + "unsigned int clone_checkout_strategy" + ], + "type": "struct", + "value": "git_submodule_update_options", + "file": "submodule.h", + "line": 129, + "lineto": 157, + "block": "unsigned int version\ngit_checkout_options checkout_opts\ngit_fetch_options fetch_opts\nunsigned int 
clone_checkout_strategy", + "tdef": "typedef", + "description": " Submodule update options structure", + "comments": "

Use the GIT_SUBMODULE_UPDATE_OPTIONS_INIT to get the default settings, like this:

\n\n

git_submodule_update_options opts = GIT_SUBMODULE_UPDATE_OPTIONS_INIT;

\n", + "fields": [ + { + "type": "unsigned int", + "name": "version", + "comments": "" + }, + { + "type": "git_checkout_options", + "name": "checkout_opts", + "comments": " These options are passed to the checkout step. To disable\n checkout, set the `checkout_strategy` to\n `GIT_CHECKOUT_NONE`. Generally you will want the use\n GIT_CHECKOUT_SAFE to update files in the working\n directory. Use the `clone_checkout_strategy` field\n to set the checkout strategy that will be used in\n the case where update needs to clone the repository." + }, + { + "type": "git_fetch_options", + "name": "fetch_opts", + "comments": " Options which control the fetch, including callbacks.\n\n The callbacks to use for reporting fetch progress, and for acquiring\n credentials in the event they are needed." + }, + { + "type": "unsigned int", + "name": "clone_checkout_strategy", + "comments": " The checkout strategy to use when the sub repository needs to\n be cloned. Use GIT_CHECKOUT_SAFE to create all files\n in the working directory for the newly cloned repository." + } + ], + "used": { + "returns": [], + "needs": [ + "git_submodule_update", + "git_submodule_update_init_options" + ] + } + } + ], + [ + "git_submodule_update_t", + { + "decl": [ + "GIT_SUBMODULE_UPDATE_CHECKOUT", + "GIT_SUBMODULE_UPDATE_REBASE", + "GIT_SUBMODULE_UPDATE_MERGE", + "GIT_SUBMODULE_UPDATE_NONE", + "GIT_SUBMODULE_UPDATE_DEFAULT" + ], + "type": "enum", + "file": "types.h", + "line": 363, + "lineto": 370, + "block": "GIT_SUBMODULE_UPDATE_CHECKOUT\nGIT_SUBMODULE_UPDATE_REBASE\nGIT_SUBMODULE_UPDATE_MERGE\nGIT_SUBMODULE_UPDATE_NONE\nGIT_SUBMODULE_UPDATE_DEFAULT", + "tdef": "typedef", + "description": " Submodule update values", + "comments": "

These values represent settings for the submodule.$name.update configuration value which says how to handle git submodule update for this submodule. The value is usually set in the ".gitmodules" file and copied to ".git/config" when the submodule is initialized.

\n\n

You can override this setting on a per-submodule basis with git_submodule_set_update() and write the changed value to disk using git_submodule_save(). If you have overwritten the value, you can revert it by passing GIT_SUBMODULE_UPDATE_RESET to the set function.

\n\n

The values are:

\n\n
    \n
  • GIT_SUBMODULE_UPDATE_CHECKOUT: the default; when a submodule is updated, checkout the new detached HEAD to the submodule directory. - GIT_SUBMODULE_UPDATE_REBASE: update by rebasing the current checked out branch onto the commit from the superproject. - GIT_SUBMODULE_UPDATE_MERGE: update by merging the commit in the superproject into the current checkout out branch of the submodule. - GIT_SUBMODULE_UPDATE_NONE: do not update this submodule even when the commit in the superproject is updated. - GIT_SUBMODULE_UPDATE_DEFAULT: not used except as static initializer when we don't want any particular update rule to be specified.
  • \n
\n", + "fields": [ + { + "type": "int", + "name": "GIT_SUBMODULE_UPDATE_CHECKOUT", + "comments": "", + "value": 1 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_UPDATE_REBASE", + "comments": "", + "value": 2 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_UPDATE_MERGE", + "comments": "", + "value": 3 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_UPDATE_NONE", + "comments": "", + "value": 4 + }, + { + "type": "int", + "name": "GIT_SUBMODULE_UPDATE_DEFAULT", + "comments": "", + "value": 0 + } + ], + "used": { + "returns": [ + "git_submodule_update_strategy" + ], + "needs": [ + "git_submodule_set_update" + ] + } + } + ], + [ + "git_tag", + { + "decl": "git_tag", + "type": "struct", + "value": "git_tag", + "file": "types.h", + "line": 114, + "lineto": 114, + "tdef": "typedef", + "description": " Parsed representation of a tag object. ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_tag_dup", + "git_tag_foreach", + "git_tag_free", + "git_tag_id", + "git_tag_lookup", + "git_tag_lookup_prefix", + "git_tag_message", + "git_tag_name", + "git_tag_owner", + "git_tag_peel", + "git_tag_tagger", + "git_tag_target", + "git_tag_target_id", + "git_tag_target_type" + ] + } + } + ], + [ + "git_time", + { + "decl": [ + "git_time_t time", + "int offset" + ], + "type": "struct", + "value": "git_time", + "file": "types.h", + "line": 156, + "lineto": 159, + "block": "git_time_t time\nint offset", + "tdef": "typedef", + "description": " Time in a signature ", + "comments": "", + "fields": [ + { + "type": "git_time_t", + "name": "time", + "comments": " time in seconds from epoch " + }, + { + "type": "int", + "name": "offset", + "comments": " timezone offset, in minutes " + } + ], + "used": { + "returns": [ + "git_commit_time" + ], + "needs": [ + "git_signature_new" + ] + } + } + ], + [ + "git_trace_level_t", + { + "decl": [ + "GIT_TRACE_NONE", + "GIT_TRACE_FATAL", + "GIT_TRACE_ERROR", + "GIT_TRACE_WARN", + "GIT_TRACE_INFO", + "GIT_TRACE_DEBUG", + 
"GIT_TRACE_TRACE" + ], + "type": "enum", + "file": "trace.h", + "line": 26, + "lineto": 47, + "block": "GIT_TRACE_NONE\nGIT_TRACE_FATAL\nGIT_TRACE_ERROR\nGIT_TRACE_WARN\nGIT_TRACE_INFO\nGIT_TRACE_DEBUG\nGIT_TRACE_TRACE", + "tdef": "typedef", + "description": " Available tracing levels. When tracing is set to a particular level,\n callers will be provided tracing at the given level and all lower levels.", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_TRACE_NONE", + "comments": "

No tracing will be performed.

\n", + "value": 0 + }, + { + "type": "int", + "name": "GIT_TRACE_FATAL", + "comments": "

Severe errors that may impact the program's execution

\n", + "value": 1 + }, + { + "type": "int", + "name": "GIT_TRACE_ERROR", + "comments": "

Errors that do not impact the program's execution

\n", + "value": 2 + }, + { + "type": "int", + "name": "GIT_TRACE_WARN", + "comments": "

Warnings that suggest abnormal data

\n", + "value": 3 + }, + { + "type": "int", + "name": "GIT_TRACE_INFO", + "comments": "

Informational messages about program execution

\n", + "value": 4 + }, + { + "type": "int", + "name": "GIT_TRACE_DEBUG", + "comments": "

Detailed data that allows for debugging

\n", + "value": 5 + }, + { + "type": "int", + "name": "GIT_TRACE_TRACE", + "comments": "

Exceptionally detailed debugging data

\n", + "value": 6 + } + ], + "used": { + "returns": [], + "needs": [ + "git_trace_callback", + "git_trace_set" + ] + } + } + ], + [ + "git_transaction", + { + "decl": "git_transaction", + "type": "struct", + "value": "git_transaction", + "file": "types.h", + "line": 175, + "lineto": 175, + "tdef": "typedef", + "description": " Transactional interface to references ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_config_lock" + ] + } + } + ], + [ + "git_transfer_progress", + { + "decl": [ + "unsigned int total_objects", + "unsigned int indexed_objects", + "unsigned int received_objects", + "unsigned int local_objects", + "unsigned int total_deltas", + "unsigned int indexed_deltas", + "size_t received_bytes" + ], + "type": "struct", + "value": "git_transfer_progress", + "file": "types.h", + "line": 253, + "lineto": 261, + "block": "unsigned int total_objects\nunsigned int indexed_objects\nunsigned int received_objects\nunsigned int local_objects\nunsigned int total_deltas\nunsigned int indexed_deltas\nsize_t received_bytes", + "tdef": "typedef", + "description": " This is passed as the first argument to the callback to allow the\n user to see the progress.", + "comments": "
    \n
  • total_objects: number of objects in the packfile being downloaded - indexed_objects: received objects that have been hashed - received_objects: objects which have been downloaded - local_objects: locally-available objects that have been injected in order to fix a thin pack. - received-bytes: size of the packfile received up to now
  • \n
\n", + "fields": [ + { + "type": "unsigned int", + "name": "total_objects", + "comments": "" + }, + { + "type": "unsigned int", + "name": "indexed_objects", + "comments": "" + }, + { + "type": "unsigned int", + "name": "received_objects", + "comments": "" + }, + { + "type": "unsigned int", + "name": "local_objects", + "comments": "" + }, + { + "type": "unsigned int", + "name": "total_deltas", + "comments": "" + }, + { + "type": "unsigned int", + "name": "indexed_deltas", + "comments": "" + }, + { + "type": "size_t", + "name": "received_bytes", + "comments": "" + } + ], + "used": { + "returns": [ + "git_remote_stats" + ], + "needs": [ + "git_indexer_append", + "git_indexer_commit", + "git_indexer_new", + "git_odb_write_pack", + "git_packbuilder_write", + "git_transfer_progress_cb" + ] + } + } + ], + [ + "git_transport", + { + "decl": "git_transport", + "type": "struct", + "value": "git_transport", + "file": "types.h", + "line": 230, + "lineto": 230, + "tdef": "typedef", + "description": " Interface which represents a transport to communicate with a\n remote.", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_smart_subtransport_git", + "git_smart_subtransport_http", + "git_smart_subtransport_ssh", + "git_transport_cb", + "git_transport_dummy", + "git_transport_init", + "git_transport_local", + "git_transport_new", + "git_transport_smart", + "git_transport_smart_certificate_check", + "git_transport_ssh_with_paths" + ] + } + } + ], + [ + "git_transport_flags_t", + { + "decl": [ + "GIT_TRANSPORTFLAGS_NONE" + ], + "type": "enum", + "file": "sys/transport.h", + "line": 31, + "lineto": 33, + "block": "GIT_TRANSPORTFLAGS_NONE", + "tdef": "typedef", + "description": " Flags to pass to transport", + "comments": "

Currently unused.

\n", + "fields": [ + { + "type": "int", + "name": "GIT_TRANSPORTFLAGS_NONE", + "comments": "", + "value": 0 + } + ], + "used": { + "returns": [], + "needs": [] + } + } + ], + [ + "git_tree", + { + "decl": "git_tree", + "type": "struct", + "value": "git_tree", + "file": "types.h", + "line": 126, + "lineto": 126, + "tdef": "typedef", + "description": " Representation of a tree object. ", + "comments": "", + "used": { + "returns": [ + "git_tree_entry_byid", + "git_tree_entry_byindex", + "git_tree_entry_byname", + "git_treebuilder_get" + ], + "needs": [ + "git_commit_amend", + "git_commit_create", + "git_commit_create_buffer", + "git_commit_create_v", + "git_commit_tree", + "git_diff_tree_to_index", + "git_diff_tree_to_tree", + "git_diff_tree_to_workdir", + "git_diff_tree_to_workdir_with_index", + "git_index_read_tree", + "git_merge_trees", + "git_pathspec_match_tree", + "git_tree_dup", + "git_tree_entry_byid", + "git_tree_entry_byindex", + "git_tree_entry_byname", + "git_tree_entry_bypath", + "git_tree_entry_cmp", + "git_tree_entry_dup", + "git_tree_entry_filemode", + "git_tree_entry_filemode_raw", + "git_tree_entry_free", + "git_tree_entry_id", + "git_tree_entry_name", + "git_tree_entry_to_object", + "git_tree_entry_type", + "git_tree_entrycount", + "git_tree_free", + "git_tree_id", + "git_tree_lookup", + "git_tree_lookup_prefix", + "git_tree_owner", + "git_tree_walk", + "git_treebuilder_clear", + "git_treebuilder_entrycount", + "git_treebuilder_filter", + "git_treebuilder_filter_cb", + "git_treebuilder_free", + "git_treebuilder_get", + "git_treebuilder_insert", + "git_treebuilder_new", + "git_treebuilder_remove", + "git_treebuilder_write", + "git_treewalk_cb" + ] + } + } + ], + [ + "git_tree_entry", + { + "decl": "git_tree_entry", + "type": "struct", + "value": "git_tree_entry", + "file": "types.h", + "line": 123, + "lineto": 123, + "tdef": "typedef", + "description": " Representation of each one of the entries in a tree object. 
", + "comments": "", + "used": { + "returns": [ + "git_tree_entry_byid", + "git_tree_entry_byindex", + "git_tree_entry_byname", + "git_treebuilder_get" + ], + "needs": [ + "git_tree_entry_bypath", + "git_tree_entry_cmp", + "git_tree_entry_dup", + "git_tree_entry_filemode", + "git_tree_entry_filemode_raw", + "git_tree_entry_free", + "git_tree_entry_id", + "git_tree_entry_name", + "git_tree_entry_to_object", + "git_tree_entry_type", + "git_treebuilder_filter_cb", + "git_treebuilder_insert", + "git_treewalk_cb" + ] + } + } + ], + [ + "git_treebuilder", + { + "decl": "git_treebuilder", + "type": "struct", + "value": "git_treebuilder", + "file": "types.h", + "line": 129, + "lineto": 129, + "tdef": "typedef", + "description": " Constructor for in-memory trees ", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_treebuilder_clear", + "git_treebuilder_entrycount", + "git_treebuilder_filter", + "git_treebuilder_free", + "git_treebuilder_get", + "git_treebuilder_insert", + "git_treebuilder_new", + "git_treebuilder_remove", + "git_treebuilder_write" + ] + } + } + ], + [ + "git_treewalk_mode", + { + "decl": [ + "GIT_TREEWALK_PRE", + "GIT_TREEWALK_POST" + ], + "type": "enum", + "file": "tree.h", + "line": 384, + "lineto": 387, + "block": "GIT_TREEWALK_PRE\nGIT_TREEWALK_POST", + "tdef": "typedef", + "description": " Tree traversal modes ", + "comments": "", + "fields": [ + { + "type": "int", + "name": "GIT_TREEWALK_PRE", + "comments": "", + "value": 0 + }, + { + "type": "int", + "name": "GIT_TREEWALK_POST", + "comments": "", + "value": 1 + } + ], + "used": { + "returns": [], + "needs": [ + "git_tree_walk" + ] + } + } + ], + [ + "git_writestream", + { + "decl": "git_writestream", + "type": "struct", + "value": "git_writestream", + "file": "types.h", + "line": 425, + "lineto": 425, + "tdef": "typedef", + "description": " A type to write in a streaming fashion, for example, for filters. 
", + "comments": "", + "used": { + "returns": [], + "needs": [ + "git_blob_create_fromstream", + "git_blob_create_fromstream_commit", + "git_filter_list_stream_blob", + "git_filter_list_stream_data", + "git_filter_list_stream_file" + ] + } + } + ] + ], + "prefix": "include/git2", + "groups": [ + [ + "annotated", + [ + "git_annotated_commit_free", + "git_annotated_commit_from_fetchhead", + "git_annotated_commit_from_ref", + "git_annotated_commit_from_revspec", + "git_annotated_commit_id", + "git_annotated_commit_lookup" + ] + ], + [ + "attr", + [ + "git_attr_add_macro", + "git_attr_cache_flush", + "git_attr_foreach", + "git_attr_get", + "git_attr_get_many", + "git_attr_value" + ] + ], + [ + "blame", + [ + "git_blame_buffer", + "git_blame_file", + "git_blame_free", + "git_blame_get_hunk_byindex", + "git_blame_get_hunk_byline", + "git_blame_get_hunk_count", + "git_blame_init_options" + ] + ], + [ + "blob", + [ + "git_blob_create_frombuffer", + "git_blob_create_fromchunks", + "git_blob_create_fromdisk", + "git_blob_create_fromstream", + "git_blob_create_fromstream_commit", + "git_blob_create_fromworkdir", + "git_blob_dup", + "git_blob_filtered_content", + "git_blob_free", + "git_blob_id", + "git_blob_is_binary", + "git_blob_lookup", + "git_blob_lookup_prefix", + "git_blob_owner", + "git_blob_rawcontent", + "git_blob_rawsize" + ] + ], + [ + "branch", + [ + "git_branch_create", + "git_branch_create_from_annotated", + "git_branch_delete", + "git_branch_is_head", + "git_branch_iterator_free", + "git_branch_iterator_new", + "git_branch_lookup", + "git_branch_move", + "git_branch_name", + "git_branch_next", + "git_branch_set_upstream", + "git_branch_upstream" + ] + ], + [ + "buf", + [ + "git_buf_contains_nul", + "git_buf_free", + "git_buf_grow", + "git_buf_is_binary", + "git_buf_set" + ] + ], + [ + "checkout", + [ + "git_checkout_head", + "git_checkout_index", + "git_checkout_init_options", + "git_checkout_tree" + ] + ], + [ + "cherrypick", + [ + "git_cherrypick", + 
"git_cherrypick_commit", + "git_cherrypick_init_options" + ] + ], + [ + "clone", + [ + "git_clone", + "git_clone_init_options" + ] + ], + [ + "commit", + [ + "git_commit_amend", + "git_commit_author", + "git_commit_body", + "git_commit_committer", + "git_commit_create", + "git_commit_create_buffer", + "git_commit_create_from_callback", + "git_commit_create_v", + "git_commit_create_with_signature", + "git_commit_dup", + "git_commit_extract_signature", + "git_commit_free", + "git_commit_header_field", + "git_commit_id", + "git_commit_lookup", + "git_commit_lookup_prefix", + "git_commit_message", + "git_commit_message_encoding", + "git_commit_message_raw", + "git_commit_nth_gen_ancestor", + "git_commit_owner", + "git_commit_parent", + "git_commit_parent_id", + "git_commit_parentcount", + "git_commit_raw_header", + "git_commit_summary", + "git_commit_time", + "git_commit_time_offset", + "git_commit_tree", + "git_commit_tree_id" + ] + ], + [ + "config", + [ + "git_config_add_backend", + "git_config_add_file_ondisk", + "git_config_backend_foreach_match", + "git_config_delete_entry", + "git_config_delete_multivar", + "git_config_entry_free", + "git_config_find_global", + "git_config_find_programdata", + "git_config_find_system", + "git_config_find_xdg", + "git_config_foreach", + "git_config_foreach_match", + "git_config_free", + "git_config_get_bool", + "git_config_get_entry", + "git_config_get_int32", + "git_config_get_int64", + "git_config_get_mapped", + "git_config_get_multivar_foreach", + "git_config_get_path", + "git_config_get_string", + "git_config_get_string_buf", + "git_config_init_backend", + "git_config_iterator_free", + "git_config_iterator_glob_new", + "git_config_iterator_new", + "git_config_lock", + "git_config_lookup_map_value", + "git_config_multivar_iterator_new", + "git_config_new", + "git_config_next", + "git_config_open_default", + "git_config_open_global", + "git_config_open_level", + "git_config_open_ondisk", + "git_config_parse_bool", + 
"git_config_parse_int32", + "git_config_parse_int64", + "git_config_parse_path", + "git_config_set_bool", + "git_config_set_int32", + "git_config_set_int64", + "git_config_set_multivar", + "git_config_set_string", + "git_config_snapshot" + ] + ], + [ + "cred", + [ + "git_cred_default_new", + "git_cred_free", + "git_cred_has_username", + "git_cred_ssh_custom_new", + "git_cred_ssh_interactive_new", + "git_cred_ssh_key_from_agent", + "git_cred_ssh_key_memory_new", + "git_cred_ssh_key_new", + "git_cred_username_new", + "git_cred_userpass", + "git_cred_userpass_plaintext_new" + ] + ], + [ + "describe", + [ + "git_describe_commit", + "git_describe_format", + "git_describe_result_free", + "git_describe_workdir" + ] + ], + [ + "diff", + [ + "git_diff_blob_to_buffer", + "git_diff_blobs", + "git_diff_buffers", + "git_diff_commit_as_email", + "git_diff_find_init_options", + "git_diff_find_similar", + "git_diff_foreach", + "git_diff_format_email", + "git_diff_format_email_init_options", + "git_diff_free", + "git_diff_get_delta", + "git_diff_get_perfdata", + "git_diff_get_stats", + "git_diff_index_to_index", + "git_diff_index_to_workdir", + "git_diff_init_options", + "git_diff_is_sorted_icase", + "git_diff_merge", + "git_diff_num_deltas", + "git_diff_num_deltas_of_type", + "git_diff_print", + "git_diff_print_callback__to_buf", + "git_diff_print_callback__to_file_handle", + "git_diff_stats_deletions", + "git_diff_stats_files_changed", + "git_diff_stats_free", + "git_diff_stats_insertions", + "git_diff_stats_to_buf", + "git_diff_status_char", + "git_diff_tree_to_index", + "git_diff_tree_to_tree", + "git_diff_tree_to_workdir", + "git_diff_tree_to_workdir_with_index" + ] + ], + [ + "fetch", + [ + "git_fetch_init_options" + ] + ], + [ + "filter", + [ + "git_filter_list_apply_to_blob", + "git_filter_list_apply_to_data", + "git_filter_list_apply_to_file", + "git_filter_list_contains", + "git_filter_list_free", + "git_filter_list_load", + "git_filter_list_new", + 
"git_filter_list_push", + "git_filter_list_stream_blob", + "git_filter_list_stream_data", + "git_filter_list_stream_file", + "git_filter_lookup", + "git_filter_register", + "git_filter_source_filemode", + "git_filter_source_flags", + "git_filter_source_id", + "git_filter_source_mode", + "git_filter_source_path", + "git_filter_source_repo", + "git_filter_unregister" + ] + ], + [ + "giterr", + [ + "giterr_clear", + "giterr_last", + "giterr_set_oom", + "giterr_set_str" + ] + ], + [ + "graph", + [ + "git_graph_ahead_behind", + "git_graph_descendant_of" + ] + ], + [ + "hashsig", + [ + "git_hashsig_compare", + "git_hashsig_create_fromfile", + "git_hashsig_free" + ] + ], + [ + "ignore", + [ + "git_ignore_add_rule", + "git_ignore_clear_internal_rules", + "git_ignore_path_is_ignored" + ] + ], + [ + "index", + [ + "git_index_add", + "git_index_add_all", + "git_index_add_bypath", + "git_index_add_frombuffer", + "git_index_caps", + "git_index_checksum", + "git_index_clear", + "git_index_conflict_add", + "git_index_conflict_cleanup", + "git_index_conflict_get", + "git_index_conflict_iterator_free", + "git_index_conflict_iterator_new", + "git_index_conflict_next", + "git_index_conflict_remove", + "git_index_entry_is_conflict", + "git_index_entry_stage", + "git_index_entrycount", + "git_index_find", + "git_index_find_prefix", + "git_index_free", + "git_index_get_byindex", + "git_index_get_bypath", + "git_index_has_conflicts", + "git_index_new", + "git_index_open", + "git_index_owner", + "git_index_path", + "git_index_read", + "git_index_read_tree", + "git_index_remove", + "git_index_remove_all", + "git_index_remove_bypath", + "git_index_remove_directory", + "git_index_set_caps", + "git_index_update_all", + "git_index_write", + "git_index_write_tree", + "git_index_write_tree_to" + ] + ], + [ + "indexer", + [ + "git_indexer_append", + "git_indexer_commit", + "git_indexer_free", + "git_indexer_hash", + "git_indexer_new" + ] + ], + [ + "libgit2", + [ + "git_libgit2_features", + 
"git_libgit2_init", + "git_libgit2_opts", + "git_libgit2_shutdown", + "git_libgit2_version" + ] + ], + [ + "mempack", + [ + "git_mempack_new", + "git_mempack_reset" + ] + ], + [ + "merge", + [ + "git_merge", + "git_merge_analysis", + "git_merge_base", + "git_merge_base_many", + "git_merge_base_octopus", + "git_merge_bases", + "git_merge_bases_many", + "git_merge_commits", + "git_merge_file", + "git_merge_file_from_index", + "git_merge_file_init_input", + "git_merge_file_init_options", + "git_merge_file_result_free", + "git_merge_init_options", + "git_merge_trees" + ] + ], + [ + "message", + [ + "git_message_prettify" + ] + ], + [ + "note", + [ + "git_note_author", + "git_note_committer", + "git_note_create", + "git_note_foreach", + "git_note_free", + "git_note_id", + "git_note_iterator_free", + "git_note_iterator_new", + "git_note_message", + "git_note_next", + "git_note_read", + "git_note_remove" + ] + ], + [ + "object", + [ + "git_object__size", + "git_object_dup", + "git_object_free", + "git_object_id", + "git_object_lookup", + "git_object_lookup_bypath", + "git_object_lookup_prefix", + "git_object_owner", + "git_object_peel", + "git_object_short_id", + "git_object_string2type", + "git_object_type", + "git_object_type2string", + "git_object_typeisloose" + ] + ], + [ + "odb", + [ + "git_odb_add_alternate", + "git_odb_add_backend", + "git_odb_add_disk_alternate", + "git_odb_backend_loose", + "git_odb_backend_one_pack", + "git_odb_backend_pack", + "git_odb_exists", + "git_odb_exists_prefix", + "git_odb_expand_ids", + "git_odb_foreach", + "git_odb_free", + "git_odb_get_backend", + "git_odb_hash", + "git_odb_hashfile", + "git_odb_init_backend", + "git_odb_new", + "git_odb_num_backends", + "git_odb_object_data", + "git_odb_object_dup", + "git_odb_object_free", + "git_odb_object_id", + "git_odb_object_size", + "git_odb_object_type", + "git_odb_open", + "git_odb_open_rstream", + "git_odb_open_wstream", + "git_odb_read", + "git_odb_read_header", + "git_odb_read_prefix", 
+ "git_odb_refresh", + "git_odb_stream_finalize_write", + "git_odb_stream_free", + "git_odb_stream_read", + "git_odb_stream_write", + "git_odb_write", + "git_odb_write_pack" + ] + ], + [ + "oid", + [ + "git_oid_cmp", + "git_oid_cpy", + "git_oid_equal", + "git_oid_fmt", + "git_oid_fromraw", + "git_oid_fromstr", + "git_oid_fromstrn", + "git_oid_fromstrp", + "git_oid_iszero", + "git_oid_ncmp", + "git_oid_nfmt", + "git_oid_pathfmt", + "git_oid_shorten_add", + "git_oid_shorten_free", + "git_oid_shorten_new", + "git_oid_strcmp", + "git_oid_streq", + "git_oid_tostr", + "git_oid_tostr_s" + ] + ], + [ + "oidarray", + [ + "git_oidarray_free" + ] + ], + [ + "openssl", + [ + "git_openssl_set_locking" + ] + ], + [ + "packbuilder", + [ + "git_packbuilder_foreach", + "git_packbuilder_free", + "git_packbuilder_hash", + "git_packbuilder_insert", + "git_packbuilder_insert_commit", + "git_packbuilder_insert_recur", + "git_packbuilder_insert_tree", + "git_packbuilder_insert_walk", + "git_packbuilder_new", + "git_packbuilder_object_count", + "git_packbuilder_set_callbacks", + "git_packbuilder_set_threads", + "git_packbuilder_write", + "git_packbuilder_written" + ] + ], + [ + "patch", + [ + "git_patch_free", + "git_patch_from_blob_and_buffer", + "git_patch_from_blobs", + "git_patch_from_buffers", + "git_patch_from_diff", + "git_patch_get_delta", + "git_patch_get_hunk", + "git_patch_get_line_in_hunk", + "git_patch_line_stats", + "git_patch_num_hunks", + "git_patch_num_lines_in_hunk", + "git_patch_print", + "git_patch_size", + "git_patch_to_buf" + ] + ], + [ + "pathspec", + [ + "git_pathspec_free", + "git_pathspec_match_diff", + "git_pathspec_match_index", + "git_pathspec_match_list_diff_entry", + "git_pathspec_match_list_entry", + "git_pathspec_match_list_entrycount", + "git_pathspec_match_list_failed_entry", + "git_pathspec_match_list_failed_entrycount", + "git_pathspec_match_list_free", + "git_pathspec_match_tree", + "git_pathspec_match_workdir", + "git_pathspec_matches_path", + 
"git_pathspec_new" + ] + ], + [ + "proxy", + [ + "git_proxy_init_options" + ] + ], + [ + "push", + [ + "git_push_init_options" + ] + ], + [ + "rebase", + [ + "git_rebase_abort", + "git_rebase_commit", + "git_rebase_finish", + "git_rebase_free", + "git_rebase_init", + "git_rebase_init_options", + "git_rebase_inmemory_index", + "git_rebase_next", + "git_rebase_open", + "git_rebase_operation_byindex", + "git_rebase_operation_current", + "git_rebase_operation_entrycount" + ] + ], + [ + "refdb", + [ + "git_refdb_backend_fs", + "git_refdb_compress", + "git_refdb_free", + "git_refdb_init_backend", + "git_refdb_new", + "git_refdb_open", + "git_refdb_set_backend" + ] + ], + [ + "reference", + [ + "git_reference__alloc", + "git_reference__alloc_symbolic", + "git_reference_cmp", + "git_reference_create", + "git_reference_create_matching", + "git_reference_delete", + "git_reference_dwim", + "git_reference_ensure_log", + "git_reference_foreach", + "git_reference_foreach_glob", + "git_reference_foreach_name", + "git_reference_free", + "git_reference_has_log", + "git_reference_is_branch", + "git_reference_is_note", + "git_reference_is_remote", + "git_reference_is_tag", + "git_reference_is_valid_name", + "git_reference_iterator_free", + "git_reference_iterator_glob_new", + "git_reference_iterator_new", + "git_reference_list", + "git_reference_lookup", + "git_reference_name", + "git_reference_name_to_id", + "git_reference_next", + "git_reference_next_name", + "git_reference_normalize_name", + "git_reference_owner", + "git_reference_peel", + "git_reference_remove", + "git_reference_rename", + "git_reference_resolve", + "git_reference_set_target", + "git_reference_shorthand", + "git_reference_symbolic_create", + "git_reference_symbolic_create_matching", + "git_reference_symbolic_set_target", + "git_reference_symbolic_target", + "git_reference_target", + "git_reference_target_peel", + "git_reference_type" + ] + ], + [ + "reflog", + [ + "git_reflog_append", + "git_reflog_delete", + 
"git_reflog_drop", + "git_reflog_entry_byindex", + "git_reflog_entry_committer", + "git_reflog_entry_id_new", + "git_reflog_entry_id_old", + "git_reflog_entry_message", + "git_reflog_entrycount", + "git_reflog_free", + "git_reflog_read", + "git_reflog_rename", + "git_reflog_write" + ] + ], + [ + "refspec", + [ + "git_refspec_direction", + "git_refspec_dst", + "git_refspec_dst_matches", + "git_refspec_force", + "git_refspec_rtransform", + "git_refspec_src", + "git_refspec_src_matches", + "git_refspec_string", + "git_refspec_transform" + ] + ], + [ + "remote", + [ + "git_remote_add_fetch", + "git_remote_add_push", + "git_remote_autotag", + "git_remote_connect", + "git_remote_connected", + "git_remote_create", + "git_remote_create_anonymous", + "git_remote_create_with_fetchspec", + "git_remote_default_branch", + "git_remote_delete", + "git_remote_disconnect", + "git_remote_download", + "git_remote_dup", + "git_remote_fetch", + "git_remote_free", + "git_remote_get_fetch_refspecs", + "git_remote_get_push_refspecs", + "git_remote_get_refspec", + "git_remote_init_callbacks", + "git_remote_is_valid_name", + "git_remote_list", + "git_remote_lookup", + "git_remote_ls", + "git_remote_name", + "git_remote_owner", + "git_remote_prune", + "git_remote_prune_refs", + "git_remote_push", + "git_remote_pushurl", + "git_remote_refspec_count", + "git_remote_rename", + "git_remote_set_autotag", + "git_remote_set_pushurl", + "git_remote_set_url", + "git_remote_stats", + "git_remote_stop", + "git_remote_update_tips", + "git_remote_upload", + "git_remote_url" + ] + ], + [ + "repository", + [ + "git_repository__cleanup", + "git_repository_config", + "git_repository_config_snapshot", + "git_repository_detach_head", + "git_repository_discover", + "git_repository_fetchhead_foreach", + "git_repository_free", + "git_repository_get_namespace", + "git_repository_hashfile", + "git_repository_head", + "git_repository_head_detached", + "git_repository_head_unborn", + "git_repository_ident", + 
"git_repository_index", + "git_repository_init", + "git_repository_init_ext", + "git_repository_init_init_options", + "git_repository_is_bare", + "git_repository_is_empty", + "git_repository_is_shallow", + "git_repository_mergehead_foreach", + "git_repository_message", + "git_repository_message_remove", + "git_repository_new", + "git_repository_odb", + "git_repository_open", + "git_repository_open_bare", + "git_repository_open_ext", + "git_repository_path", + "git_repository_refdb", + "git_repository_reinit_filesystem", + "git_repository_set_bare", + "git_repository_set_config", + "git_repository_set_head", + "git_repository_set_head_detached", + "git_repository_set_head_detached_from_annotated", + "git_repository_set_ident", + "git_repository_set_index", + "git_repository_set_namespace", + "git_repository_set_odb", + "git_repository_set_refdb", + "git_repository_set_workdir", + "git_repository_state", + "git_repository_state_cleanup", + "git_repository_workdir", + "git_repository_wrap_odb" + ] + ], + [ + "reset", + [ + "git_reset", + "git_reset_default", + "git_reset_from_annotated" + ] + ], + [ + "revert", + [ + "git_revert", + "git_revert_commit", + "git_revert_init_options" + ] + ], + [ + "revparse", + [ + "git_revparse", + "git_revparse_ext", + "git_revparse_single" + ] + ], + [ + "revwalk", + [ + "git_revwalk_add_hide_cb", + "git_revwalk_free", + "git_revwalk_hide", + "git_revwalk_hide_glob", + "git_revwalk_hide_head", + "git_revwalk_hide_ref", + "git_revwalk_new", + "git_revwalk_next", + "git_revwalk_push", + "git_revwalk_push_glob", + "git_revwalk_push_head", + "git_revwalk_push_range", + "git_revwalk_push_ref", + "git_revwalk_repository", + "git_revwalk_reset", + "git_revwalk_simplify_first_parent", + "git_revwalk_sorting" + ] + ], + [ + "signature", + [ + "git_signature_default", + "git_signature_dup", + "git_signature_free", + "git_signature_new", + "git_signature_now" + ] + ], + [ + "smart", + [ + "git_smart_subtransport_git", + 
"git_smart_subtransport_http", + "git_smart_subtransport_ssh" + ] + ], + [ + "stash", + [ + "git_stash_apply", + "git_stash_apply_init_options", + "git_stash_drop", + "git_stash_foreach", + "git_stash_pop" + ] + ], + [ + "status", + [ + "git_status_byindex", + "git_status_file", + "git_status_foreach", + "git_status_foreach_ext", + "git_status_init_options", + "git_status_list_entrycount", + "git_status_list_free", + "git_status_list_get_perfdata", + "git_status_list_new", + "git_status_should_ignore" + ] + ], + [ + "strarray", + [ + "git_strarray_copy", + "git_strarray_free" + ] + ], + [ + "stream", + [ + "git_stream_register_tls" + ] + ], + [ + "submodule", + [ + "git_submodule_add_finalize", + "git_submodule_add_setup", + "git_submodule_add_to_index", + "git_submodule_branch", + "git_submodule_fetch_recurse_submodules", + "git_submodule_foreach", + "git_submodule_free", + "git_submodule_head_id", + "git_submodule_ignore", + "git_submodule_index_id", + "git_submodule_init", + "git_submodule_location", + "git_submodule_lookup", + "git_submodule_name", + "git_submodule_open", + "git_submodule_owner", + "git_submodule_path", + "git_submodule_reload", + "git_submodule_repo_init", + "git_submodule_resolve_url", + "git_submodule_set_branch", + "git_submodule_set_fetch_recurse_submodules", + "git_submodule_set_ignore", + "git_submodule_set_update", + "git_submodule_set_url", + "git_submodule_status", + "git_submodule_sync", + "git_submodule_update", + "git_submodule_update_init_options", + "git_submodule_update_strategy", + "git_submodule_url", + "git_submodule_wd_id" + ] + ], + [ + "tag", + [ + "git_tag_annotation_create", + "git_tag_create", + "git_tag_create_frombuffer", + "git_tag_create_lightweight", + "git_tag_delete", + "git_tag_dup", + "git_tag_foreach", + "git_tag_free", + "git_tag_id", + "git_tag_list", + "git_tag_list_match", + "git_tag_lookup", + "git_tag_lookup_prefix", + "git_tag_message", + "git_tag_name", + "git_tag_owner", + "git_tag_peel", + 
"git_tag_tagger", + "git_tag_target", + "git_tag_target_id", + "git_tag_target_type" + ] + ], + [ + "trace", + [ + "git_trace_set" + ] + ], + [ + "transport", + [ + "git_transport_dummy", + "git_transport_init", + "git_transport_local", + "git_transport_new", + "git_transport_smart", + "git_transport_smart_certificate_check", + "git_transport_ssh_with_paths", + "git_transport_unregister" + ] + ], + [ + "tree", + [ + "git_tree_dup", + "git_tree_entry_byid", + "git_tree_entry_byindex", + "git_tree_entry_byname", + "git_tree_entry_bypath", + "git_tree_entry_cmp", + "git_tree_entry_dup", + "git_tree_entry_filemode", + "git_tree_entry_filemode_raw", + "git_tree_entry_free", + "git_tree_entry_id", + "git_tree_entry_name", + "git_tree_entry_to_object", + "git_tree_entry_type", + "git_tree_entrycount", + "git_tree_free", + "git_tree_id", + "git_tree_lookup", + "git_tree_lookup_prefix", + "git_tree_owner", + "git_tree_walk" + ] + ], + [ + "treebuilder", + [ + "git_treebuilder_clear", + "git_treebuilder_entrycount", + "git_treebuilder_filter", + "git_treebuilder_free", + "git_treebuilder_get", + "git_treebuilder_insert", + "git_treebuilder_new", + "git_treebuilder_remove", + "git_treebuilder_write" + ] + ] + ], + "examples": [ + [ + "add.c", + "ex/HEAD/add.html" + ], + [ + "blame.c", + "ex/HEAD/blame.html" + ], + [ + "cat-file.c", + "ex/HEAD/cat-file.html" + ], + [ + "common.c", + "ex/HEAD/common.html" + ], + [ + "describe.c", + "ex/HEAD/describe.html" + ], + [ + "diff.c", + "ex/HEAD/diff.html" + ], + [ + "for-each-ref.c", + "ex/HEAD/for-each-ref.html" + ], + [ + "general.c", + "ex/HEAD/general.html" + ], + [ + "init.c", + "ex/HEAD/init.html" + ], + [ + "log.c", + "ex/HEAD/log.html" + ], + [ + "network/clone.c", + "ex/HEAD/network/clone.html" + ], + [ + "network/common.c", + "ex/HEAD/network/common.html" + ], + [ + "network/fetch.c", + "ex/HEAD/network/fetch.html" + ], + [ + "network/git2.c", + "ex/HEAD/network/git2.html" + ], + [ + "network/index-pack.c", + 
"ex/HEAD/network/index-pack.html" + ], + [ + "network/ls-remote.c", + "ex/HEAD/network/ls-remote.html" + ], + [ + "remote.c", + "ex/HEAD/remote.html" + ], + [ + "rev-list.c", + "ex/HEAD/rev-list.html" + ], + [ + "rev-parse.c", + "ex/HEAD/rev-parse.html" + ], + [ + "showindex.c", + "ex/HEAD/showindex.html" + ], + [ + "status.c", + "ex/HEAD/status.html" + ], + [ + "tag.c", + "ex/HEAD/tag.html" + ] + ] +} diff --git a/generate/input/libgit2-supplement.json b/generate/input/libgit2-supplement.json new file mode 100644 index 000000000..0e46a5686 --- /dev/null +++ b/generate/input/libgit2-supplement.json @@ -0,0 +1,755 @@ +{ + "types": { + "git_cred_default": { + "decl": "git_cred" + }, + "git_diff_hunk": { + "decl": [ + "int old_start", + "int old_lines", + "int new_start", + "int new_lines", + "size_t header_len", + "char header[128]" + ], + "fields": [ + { + "name": "old_start", + "type": "int" + }, + { + "name": "old_lines", + "type": "int" + }, + { + "name": "new_start", + "type": "int" + }, + { + "name": "new_lines", + "type": "int" + }, + { + "name": "header_len", + "type": "size_t" + }, + { + "name": "header", + "type": "char *" + } + ] + }, + "git_diff_line": { + "decl": [ + "int origin", + "int old_lineno", + "int new_lineno", + "int num_lines", + "size_t, content_len", + "git_off_t content_offset", + "const char * content" + ], + "fields": [ + { + "name": "origin", + "type": "int" + }, + { + "name": "old_lineno", + "type": "int" + }, + { + "name": "new_lineno", + "type": "int" + }, + { + "name": "num_lines", + "type": "int" + }, + { + "name": "content_len", + "type": "size_t" + }, + { + "name": "content_offset", + "type": "git_off_t" + }, + { + "name": "content", + "type": "const char *" + } + ] + }, + "git_note_iterator": { + "decl": "git_iterator" + } + }, + "new" : { + "functions": { + "git_patch_convenient_from_diff": { + "args": [ + { + "name": "diff", + "type": "git_diff *" + }, + { + "name": "out", + "type": "std::vector *" + } + ], + "type": 
"function", + "isManual": true, + "cFile": "generate/templates/manual/patches/convenient_patches.cc", + "isAsync": true, + "isPrototypeMethod": false, + "group": "patch", + "return": { + "type": "int", + "isErrorCode": true + } + }, + "git_rebase_next": { + "type": "function", + "file": "rebase.h", + "args": [ + { + "name": "out", + "type": "git_rebase_operation **" + }, + { + "name": "rebase", + "type": "git_rebase *" + } + ], + "return": { + "type": "int" + }, + "group": "rebase" + }, + "git_reset": { + "type": "function", + "file": "reset.h", + "args": [ + { + "name": "repo", + "type": "git_repository *" + }, + { + "name": "target", + "type": "git_object *" + }, + { + "name": "reset_type", + "type": "git_reset_t" + }, + { + "name": "checkout_opts", + "type": "git_checkout_options *" + } + ], + "return": { + "type": "int" + }, + "group": "reset" + }, + "git_revwalk_fast_walk": { + "args": [ + { + "name": "max_count", + "type": "int" + }, + { + "name": "out", + "type": "std::vector *" + }, + { + "name": "walk", + "type": "git_revwalk *" + } + ], + "type": "function", + "isManual": true, + "cFile": "generate/templates/manual/revwalk/fast_walk.cc", + "isAsync": true, + "isPrototypeMethod": true, + "group": "revwalk", + "return": { + "type": "int", + "isErrorCode": true + } + }, + "git_revwalk_file_history_walk": { + "args": [ + { + "name": "file_path", + "type": "const char *" + }, + { + "name": "max_count", + "type": "int" + }, + { + "name": "out", + "type": "std::vector< std::pair > *> *" + }, + { + "name": "walk", + "type": "git_revwalk *" + } + ], + "type": "function", + "isManual": true, + "cFile": "generate/templates/manual/revwalk/file_history_walk.cc", + "isAsync": true, + "isPrototypeMethod": true, + "group": "revwalk", + "return": { + "type": "int", + "isErrorCode": true + } + }, + "git_stash_save": { + "type": "function", + "file": "stash.h", + "args": [ + { + "name": "out", + "type": "git_oid *" + }, + { + "name": "repo", + "type": "git_repository *" + 
}, + { + "name": "stasher", + "type": "const git_signature *" + }, + { + "name": "message", + "type": "const char *" + }, + { + "name": "flags", + "type": "unsigned int" + } + ], + "return": { + "type": "int" + }, + "group": "stash" + } + }, + "groups": [ + [ + "annotated_commit", + [ + "git_annotated_commit_free", + "git_annotated_commit_from_fetchhead", + "git_annotated_commit_from_ref", + "git_annotated_commit_from_revspec", + "git_annotated_commit_id", + "git_annotated_commit_lookup" + ] + ], + [ + "odb_object", + [ + "git_odb_object_data", + "git_odb_object_dup", + "git_odb_object_free", + "git_odb_object_id", + "git_odb_object_size", + "git_odb_object_type" + ] + ], + [ + "patch", + [ + "git_patch_convenient_from_diff" + ] + ], + [ + "reflog_entry", + [ + "git_reflog_entry_committer", + "git_reflog_entry_id_new", + "git_reflog_entry_id_old", + "git_reflog_entry_message" + ] + ], + [ + "revwalk", + [ + "git_revwalk_fast_walk", + "git_revwalk_file_history_walk" + ] + ], + [ + "status_list", + [ + "git_status_list_entrycount", + "git_status_list_free", + "git_status_list_get_perfdata", + "git_status_list_new" + ] + ], + [ + "tree_entry", + [ + "git_tree_entry_filemode", + "git_tree_entry_filemode_raw", + "git_tree_entry_free", + "git_tree_entry_id", + "git_tree_entry_name", + "git_tree_entry_to_object", + "git_tree_entry_type" + ] + ] + ], + "types": [ + [ + "git_stash_apply_progress_t", + { + "type": "enum", + "fields": [ + { + "type": "int", + "name": "GIT_STASH_APPLY_PROGRESS_NONE", + "value": 0 + }, + { + "type": "int", + "name": "GIT_STASH_APPLY_PROGRESS_LOADING_STASH", + "value": 1 + }, + { + "type": "int", + "name": "GIT_STASH_APPLY_PROGRESS_ANALYZE_INDEX", + "value": 2 + }, + { + "type": "int", + "name": "GIT_STASH_APPLY_PROGRESS_ANALYZE_MODIFIED", + "value": 3 + }, + { + "type": "int", + "name": "GIT_STASH_APPLY_PROGRESS_ANALYZE_UNTRACKED", + "value": 4 + }, + { + "type": "int", + "name": "GIT_STASH_APPLY_PROGRESS_CHECKOUT_UNTRACKED", + "value": 5 + }, 
+ { + "type": "int", + "name": "GIT_STASH_APPLY_PROGRESS_CHECKOUT_MODIFIED", + "value": 6 + }, + { + "type": "int", + "name": "GIT_STASH_APPLY_PROGRESS_DONE", + "value": 7 + } + ] + } + ], + [ + "git_config_entry", + { + "fields": [ + { + "type": "const char *", + "name": "name" + }, + { + "type": "const char *", + "name": "value" + }, + { + "type": "git_config_level_t", + "name": "level" + }, + { + "type": "void (*)(struct git_config_entry *)", + "name": "free", + "ignore": true + } + ] + } + ], + [ + "git_filter", + { + "type": "struct", + "fields": [ + { + "type": "unsigned int", + "name": "version" + }, + { + "type": "const char *", + "name": "attributes" + }, + { + "type": "git_filter_init_fn", + "name": "initialize", + "ignore": true + }, + { + "type": "git_filter_shutdown_fn", + "name": "shutdown", + "ignore": true + }, + { + "type": "git_filter_check_fn", + "name": "check", + "ignore": true + }, + { + "type": "git_filter_apply_fn", + "name": "apply", + "ignore": true + }, + { + "type": "git_filter_stream_fn", + "name": "stream", + "ignore": true + }, + { + "type": "git_filter_cleanup_fn", + "name": "cleanup", + "ignore": true + } + ] + } + ], + [ + "git_status_entry", + { + "fields": [ + { + "type": "git_status_t", + "name": "status" + }, + { + "type": "git_diff_delta *", + "name": "head_to_index" + }, + { + "type": "git_diff_delta *", + "name": "index_to_workdir" + } + ] + } + ], + [ + "git_diff_perfdata", + { + "type": "struct", + "fields": [ + { + "type": "unsigned int", + "name": "version" + }, + { + "type": "size_t", + "name": "stat_calls" + }, + { + "type": "size_t", + "name": "oid_calculations" + } + ] + } + ], + [ + "git_fetch_options", + { + "type": "struct", + "fields": [ + { + "name": "version", + "type": "int" + }, + { + "name": "callbacks", + "type": "git_remote_callbacks" + }, + { + "name": "prune", + "type": "git_fetch_prune_t" + }, + { + "name": "update_fetchhead", + "type": "int" + }, + { + "name": "download_tags", + "type": 
"git_remote_autotag_option_t" + } + ], + "used": { + "needs": [ + "git_fetch_init_options", + "git_remote_init_callbacks" + ] + } + } + ], + [ + "git_fetch_prune_t", + { + "type": "enum", + "fields": [ + { + "type": "int", + "name": "git_fetch_prune_unspecified", + "value": 0 + }, + { + "type": "int", + "name": "git_fetch_prune", + "value": 1 + }, + { + "type": "int", + "name": "git_fetch_no_prune", + "value": 2 + } + ] + } + ], + [ + "git_off_t", + { + "type": "enum" + } + ], + [ + "git_rebase_options", + { + "type": "struct", + "fields": [ + { + "type": "unsigned int", + "name": "version" + }, + { + "type": "int", + "name": "quiet" + }, + { + "type": "const char *", + "name": "rewrite_notes_ref" + }, + { + "type": "git_checkout_options", + "name": "checkout_options" + }, + { + "type": "git_merge_options", + "name": "merge_options" + } + ], + "used": { + "needs": [ + "git_rebase_init_options", + "git_checkout_init_options", + "git_merge_init_options" + ] + } + } + ], + [ + "git_remote_autotag_option_t", + { + "type": "enum" + } + ], + [ + "git_remote_callbacks", + { + "type": "struct", + "fields": [ + { + "type": "unsigned int", + "name": "version" + }, + { + "type": "git_transport_message_cb", + "name": "sideband_progress" + }, + { + "type": "git_cred_acquire_cb", + "name": "credentials" + }, + { + "type": "git_transport_certificate_check_cb", + "name": "certificate_check" + }, + { + "type": "git_transfer_progress_cb", + "name": "transfer_progress" + }, + { + "type": "git_transport_cb", + "name": "transport", + "ignore": true + }, + { + "type": "void *", + "name": "payload" + } + ], + "used": { + "needs": [ + "git_remote_init_callbacks" + ] + } + } + ], + [ + "git_time_t", + { + "type": "enum" + } + ], + [ + "git_trace_level_t", + { + "type": "enum" + } + ], + [ + "git_status_options", + { + "type": "struct", + "fields": [ + { + "type": "unsigned int", + "name": "version" + }, + { + "type": "git_status_show_t", + "name": "show" + }, + { + "type": 
"git_status_opt_t", + "name": "flags" + }, + { + "type": "git_strarray", + "name": "pathspec" + } + ], + "used": { + "needs": [ + "git_status_init_options", + "git_status_foreach_ext", + "git_status_list_new" + ] + } + } + ], + [ + "git_stash_apply_options", + { + "type": "struct", + "fields": [ + { + "type": "unsigned int", + "name": "version" + }, + { + "type": "git_stash_apply_flags", + "name": "flags" + }, + { + "type": "git_checkout_options", + "name": "checkout_options" + }, + { + "type": "git_stash_apply_progress_cb", + "name": "progress_cb" + }, + { + "type": "void *", + "name": "progress_payload" + } + ], + "used": { + "needs": [ + "git_stash_apply_init_options", + "git_checkout_init_options" + ] + } + } + ] + ] + }, + "remove": { + "annotated": { + "functions": [ + "git_annotated_commit_free", + "git_annotated_commit_from_fetchhead", + "git_annotated_commit_from_ref", + "git_annotated_commit_from_revspec", + "git_annotated_commit_id", + "git_annotated_commit_lookup" + ] + }, + "odb": { + "functions": [ + "git_odb_object_data", + "git_odb_object_dup", + "git_odb_object_free", + "git_odb_object_id", + "git_odb_object_size", + "git_odb_object_type" + ] + }, + "merge": { + "functions": [ + "git_merge_head_free", + "git_merge_head_from_fetchhead", + "git_merge_head_from_id", + "git_merge_head_from_ref", + "git_merge_head_id" + ] + }, + "reflog": { + "functions": [ + "git_reflog_entry_committer", + "git_reflog_entry_id_new", + "git_reflog_entry_id_old", + "git_reflog_entry_message" + ] + }, + "status": { + "functions": [ + "git_status_list_entrycount", + "git_status_list_free", + "git_status_list_get_perfdata", + "git_status_list_new" + ] + }, + "tree": { + "functions": [ + "git_tree_entry_filemode", + "git_tree_entry_filemode_raw", + "git_tree_entry_free", + "git_tree_entry_id", + "git_tree_entry_name", + "git_tree_entry_to_object", + "git_tree_entry_type" + ] + } + }, + "groups": { + "stash": [ + "git_stash_save" + ] + } +} diff --git 
a/generate/scripts/generateJson.js b/generate/scripts/generateJson.js new file mode 100644 index 000000000..f1afeb956 --- /dev/null +++ b/generate/scripts/generateJson.js @@ -0,0 +1,240 @@ +const path = require("path"); +const utils = require("./utils"); +var _; + +var libgit2 = require("../input/libgit2-docs.json"); +var descriptor = require("../input/descriptor.json"); +var supplement = require("../input/libgit2-supplement.json"); + +module.exports = function generateJson() { + var helpers = require("./helpers"); + _ = require("lodash"); + // libgit2's docs aren't complete so we'll add in what they're missing here + libgit2.types.forEach(function(type) { + if (supplement.types[type[0]]) { + _.merge(type[1], supplement.types[type[0]]); + } + }); + + libgit2.groups.forEach(function(group) { + if (supplement.groups[group[0]]) { + Array.prototype.push.apply(group[1], supplement.groups[group[0]]); + } + }); + + Array.prototype.push.apply(libgit2.types, supplement.new.types); + Array.prototype.push.apply(libgit2.groups, supplement.new.groups); + _.merge(libgit2.functions, supplement.new.functions); + + var output = []; + var dependencyLookup = {}; + var types = []; + var enums = []; + + // reduce all of the groups into a hashmap and a name array for easy lookup + var groups = libgit2.groups.reduce(function(memo, group) { + var groupName = group[0]; + + // Some functions are in the wrong group so we can't just ignore them. + // We have to completely remove them from one group and manually add them + // into the other. 
+ var functionNames = group[1].filter(function(fnName) { + return !supplement.remove[groupName] || + !supplement.remove[groupName].functions || + !~supplement.remove[groupName].functions.indexOf(fnName); + }); + + // If we've already found some functions for this group lets add the new + // ones we found instead of overwriting the old ones + if (memo[groupName]) { + memo[groupName] = memo[groupName].concat(functionNames); + } + // if we have an empty group then just ignore it + else if (functionNames.length) { + memo[groupName] = functionNames; + } + + return memo; + }, {}); + + + // Split each type from the array into classes/structs and enums + // each entry is of type ['name', {definingobject}] + libgit2.types.forEach(function(current) { + current[1].typeName = current[0]; + + // just log these out to a file for fun + if (current[1].type === "enum") { + enums.push(current[1]); + } + else { + types.push(current[1]); + } + }); + + var previous = ""; + enums = _(enums).sortBy("name").reduce(function(enumMemo, enumerable) { + if (previous == enumerable.typeName) { + if (process.env.BUILD_ONLY) { + console.warn('Duplicate definition for enum ' + enumerable.typeName + + ". skipped."); + } + } + else if (!enumerable.fields) { + if (process.env.BUILD_ONLY) { + console.warn('Incomplete definition for enum ' + enumerable.typeName + + ". 
skipped."); + } + } + else { + enumMemo[enumerable.typeName] = { + typeName: enumerable.typeName.replace(/^git_/, "").replace(/_t$/, ""), + type: "enum", + cType: enumerable.typeName, + isMask: (/_t$/).test(enumerable.typeName), + values: enumerable.fields.map(function(field) { + return { + name: field.name, + value: field.value + } + }) + }; + } + + previous = enumerable.typeName; + return enumMemo; + }, {}).valueOf(); + + // decorate the definitions with required data to build the C++ files + types.forEach(function(typeDef) { + var typeName = typeDef.typeName; + typeDef.cType = typeName; + typeName = typeName.replace("git_", ""); + typeDef.typeName = typeName; + dependencyLookup[typeName] = typeName; + + typeDef.functions = groups[typeName] || []; + helpers.decoratePrimaryType(typeDef, enums); + + groups[typeName] = false; + + typeDef.type = typeDef.hasConstructor ? "struct" : "class"; + + output.push(typeDef); + }); + + // Loop over the groups in case we missed anything (eg the types are missing in the docs); + for (var groupName in groups) { + var groupDef = groups[groupName]; + if (groupDef === false) { + continue; + } + + groupDef = { + functions: groupDef + }; + + groupDef.type = "class"; + groupDef.cType = (descriptor.types[groupName] || {}).cType || groupDef.cType; + + groupDef.typeName = groupName; + dependencyLookup[groupName] = groupName; + helpers.decoratePrimaryType(groupDef, enums); + + output.push(groupDef); + } + + // Calculate dependencies + output.forEach(function (def) { + if (def.ignore) { + return; + } + var dependencies = {}; + var addDependencies = function (prop) { + if (prop.ignore) { + return; + } + + var type = helpers.normalizeCtype(prop.type || prop.cType).replace("git_", ""); + var dependencyFilename = dependencyLookup[type]; + + if (dependencyFilename && dependencyFilename !== def.filename) { + dependencies[dependencyFilename] = dependencyFilename; + } + + (prop.args || []).forEach(addDependencies); + + if (prop.return) { + 
addDependencies(prop.return); + } + }; + + def.fields.forEach(addDependencies); + def.functions.forEach(addDependencies); + + Object.keys(dependencies).forEach(function (dependencyFilename) { + def.dependencies.push("../include/" + dependencyFilename + ".h"); + }); + + // Additionally provide a friendly name to the actual filename. + def.name = path.basename(def.filename, ".h"); + + def.functions.forEach(function(fn) { + fn.cppClassName = def.cppClassName; + }); + }); + + // Process enums + _(enums).forEach(function(enumerable) { + output.some(function(obj) { + if (enumerable.typeName.indexOf(obj.typeName) == 0) { + enumerable.owner = obj.jsClassName; + } + else if (enumerable.owner) { + return true; + } + }); + + var override = descriptor.enums[enumerable.typeName] || {}; + + enumerable.owner = override.owner || enumerable.owner || "Enums"; + + enumerable.JsName = enumerable.typeName + .replace(new RegExp("^" + enumerable.owner.toLowerCase()), "") + .replace(/^_/, "") + .toUpperCase(); + + enumerable.values.forEach(function(value) { + value.JsName = value.name + .replace(/^GIT_/, "") + .replace(override.removeString || "", "") + .replace(new RegExp("^" + enumerable.owner.toUpperCase()), "") + .replace(/^_/, "") + .replace(new RegExp("^" + enumerable.JsName), "") + .replace(/^_/, "") + .toUpperCase(); + + if (override.values && override.values[value.name]) { + _.merge(value, override.values[value.name]); + } + }); + + _.merge(enumerable, _.omit(override, ["values"])); + + output.push(enumerable); + }); + + output = _.sortBy(output, "typeName"); + + if (process.argv[2] != "--documentation") { + helpers.filterDocumentation(output); + } + + + utils.writeFile("output/idefs.json", output); + +}; + +if (require.main === module) { + module.exports(); +} diff --git a/generate/scripts/generateMissingTests.js b/generate/scripts/generateMissingTests.js new file mode 100644 index 000000000..4aef70f4d --- /dev/null +++ b/generate/scripts/generateMissingTests.js @@ -0,0 +1,72 @@ 
+const path = require("path"); +const utils = require("./utils"); + +const testFilesPath = "../test/tests"; +const missingFileIgnores = require("../input/ignored-missing-tests"); + +module.exports = function generateMissingTests() { + var output = {}; + + function findMissingTest(idef) { + return new Promise(function(resolve, reject) { + var testFilePath = path.join(testFilesPath, idef.filename + ".js"); + var result = {}; + + var file = utils.readFile(testFilePath); + if (file) { + var fieldsResult = []; + var functionsResult = []; + var fieldIgnores = (missingFileIgnores[idef.filename] || {}).fields; + var functionIgnores = (missingFileIgnores[idef.filename] || {}).functions; + + fieldIgnores = fieldIgnores || []; + functionIgnores = functionIgnores || []; + file = file || ""; + + idef.fields.forEach(function(field) { + if (file.indexOf(field.jsFunctionName) < 0 + && fieldIgnores.indexOf(field.jsFunctionName < 0)) { + fieldsResult.push(field.jsFunctionName); + } + }); + + result.fields = fieldsResult; + + idef.functions.forEach(function(fn) { + if (file.indexOf(fn.jsFunctionName) < 0 + && functionIgnores.indexOf(fn.jsFunctionName) < 0) { + functionsResult.push(fn.jsFunctionName); + } + }); + + result.functions = functionsResult; + } + else { + result.testFileMissing = false; + result.testFilePath = testFilePath; + } + + output[idef.filename] = result; + resolve(); + }); + }; + + const idefs = require("../output/idefs"); + var promises = idefs.map(function(idef) { + return findMissingTest(idef); + }); + + Promise.all(promises).then( + function() { + utils.writeFile("output/missing-tests.json", output); + }, + function(fail) { + console.error(fail); + } + ); + +}; + +if (require.main === module) { + module.exports(); +} diff --git a/generate/scripts/generateNativeCode.js b/generate/scripts/generateNativeCode.js new file mode 100644 index 000000000..56d52672c --- /dev/null +++ b/generate/scripts/generateNativeCode.js @@ -0,0 +1,153 @@ +const path = require("path"); 
// generate/scripts/generate_native_code.js
//
// Renders the libgit2 interface definitions (idefs) through the combyne
// templates to produce the native sources (src/*.cc, include/*.h),
// binding.gyp, lib/nodegit.js and lib/enums.js.
// NOTE(review): `path` is used below but is not required in the visible
// lines — presumably `require("path")` sits above this excerpt; confirm.
const promisify = require("promisify-node");
const fse = promisify(require("fs-extra"));
const exec = require('../../utils/execPromise');
const utils = require("./utils");

module.exports = function generateNativeCode() {
  const combyne = require("combyne");
  const js_beautify = require("js-beautify").js_beautify;
  // Normalizes generated JS/gyp output so diffs stay stable between runs.
  const beautify = function (input) {
    return js_beautify(input, {
      "brace_style": "end-expand",
      "max_preserve_newlines": 2,
      "preserve_newlines": true,
      "indent_size": 2,
      "indent_char": " "
    });
  };

  // Customize the delimiters so as to not process `{{{` or `}}}`
  // (those appear literally in the C++ templates).
  combyne.settings.delimiters = {
    START_RAW: "{{=",
    END_RAW: "=}}"
  };

  // Reusable template fragments shared by the class/struct templates.
  var partials = {
    asyncFunction: utils.readFile("templates/partials/async_function.cc"),
    callbackHelpers: utils.readFile("templates/partials/callback_helpers.cc"),
    convertFromV8: utils.readFile("templates/partials/convert_from_v8.cc"),
    convertToV8: utils.readFile("templates/partials/convert_to_v8.cc"),
    doc: utils.readFile("templates/partials/doc.cc"),
    fields: utils.readFile("templates/partials/fields.cc"),
    guardArguments: utils.readFile("templates/partials/guard_arguments.cc"),
    syncFunction: utils.readFile("templates/partials/sync_function.cc"),
    fieldAccessors: utils.readFile("templates/partials/field_accessors.cc"),
    traits: utils.readFile("templates/partials/traits.h")
  };

  // Top-level templates; each produces one output file (or one per idef).
  var templates = {
    class_content: utils.readFile("templates/templates/class_content.cc"),
    struct_content: utils.readFile("templates/templates/struct_content.cc"),
    class_header: utils.readFile("templates/templates/class_header.h"),
    struct_header: utils.readFile("templates/templates/struct_header.h"),
    binding: utils.readFile("templates/templates/binding.gyp"),
    nodegitCC: utils.readFile("templates/templates/nodegit.cc"),
    nodegitJS: utils.readFile("templates/templates/nodegit.js"),
    enums: utils.readFile("templates/templates/enums.js")
  };

  // Combyne filters available inside every template.
  var filters = {
    and: require("../templates/filters/and"),
    argsInfo: require("../templates/filters/args_info"),
    cppToV8: require("../templates/filters/cpp_to_v8"),
    defaultValue: require("../templates/filters/default_value"),
    fieldsInfo: require("../templates/filters/fields_info"),
    hasReturnType: require("../templates/filters/has_return_type"),
    hasReturnValue: require("../templates/filters/has_return_value"),
    isDoublePointer: require("../templates/filters/is_double_pointer"),
    isFixedLengthString: require("../templates/filters/is_fixed_length_string"),
    isOid: require("../templates/filters/is_oid"),
    isPayload: require("../templates/filters/is_payload"),
    isPointer: require("../templates/filters/is_pointer"),
    isV8Value: require("../templates/filters/is_v8_value"),
    jsArgsCount: require("../templates/filters/js_args_count"),
    or: require("../templates/filters/or"),
    payloadFor: require("../templates/filters/payload_for"),
    replace: require("../templates/filters/replace"),
    returnsCount: require("../templates/filters/returns_count"),
    returnsInfo: require("../templates/filters/returns_info"),
    titleCase: require("../templates/filters/title_case"),
    toBool: require('../templates/filters/to_bool'),
    unPointer: require("../templates/filters/un_pointer"),
    upper: require("../templates/filters/upper")
  };

  // Convert Buffers to Combyne templates.
  Object.keys(templates).forEach(function(template) {
    templates[template] = combyne(templates[template]);

    // Attach all filters to all templates.
    Object.keys(filters).forEach(function(filter) {
      templates[template].registerFilter(filter, filters[filter]);
    });
  });

  // Attach all partials to select templates (only the class/struct
  // templates reference partials).
  Object.keys(partials).forEach(function(partial) {
    templates.class_header.registerPartial(partial, combyne(partials[partial]));
    templates.class_content.registerPartial(partial, combyne(partials[partial]));
    templates.struct_header.registerPartial(partial, combyne(partials[partial]));
    templates.struct_content.registerPartial(partial, combyne(partials[partial]));
  });


  // Determine which definitions to actually include in the source code.
  // NOTE(review): possibly redundant if the idefs generator already drops
  // ignored entries — confirm before removing.
  const idefs = require("../output/idefs");
  var enabled = idefs.filter(function(idef) {
    return !idef.ignore;
  });


  // Wipe the previous generated output, re-seed with the hand-written
  // manual sources, then render everything. Failures end up in the final
  // .catch and are only logged.
  fse.remove(path.resolve(__dirname, "../../src")).then(function() {
    return fse.remove(path.resolve(__dirname, "../../include"));
  }).then(function() {
    return fse.copy(path.resolve(__dirname, "../templates/manual/include"), path.resolve(__dirname, "../../include"));
  }).then(function() {
    return fse.copy(path.resolve(__dirname, "../templates/manual/src"), path.resolve(__dirname, "../../src"));
  }).then(function() {
    // Write out single purpose templates.
    utils.writeFile("../binding.gyp", beautify(templates.binding.render(enabled)), "binding.gyp");
    utils.writeFile("../src/nodegit.cc", templates.nodegitCC.render(enabled), "nodegit.cc");
    utils.writeFile("../lib/nodegit.js", beautify(templates.nodegitJS.render(enabled)), "nodegit.js");
    // Write out all the classes (one .cc/.h pair per non-enum idef).
    enabled.forEach(function(idef) {
      if (idef.type && idef.type != "enum") {
        utils.writeFile(
          "../src/" + idef.filename + ".cc",
          templates[idef.type + "_content"].render(idef),
          idef.type + "_content.cc"
        );
        utils.writeFile(
          "../include/" + idef.filename + ".h",
          templates[idef.type + "_header"].render(idef),
          idef.type + "_header.h"
        );
      }
    });

    utils.writeFile("../lib/enums.js", beautify(templates.enums.render(enabled)), "enums.js");
  }).then(function() {
    // Optionally reformat the generated C++ with astyle if it is on PATH.
    // The rejection handler swallows "astyle not installed" on purpose.
    return exec("command -v astyle").then(function(astyle) {
      if (astyle) {
        return exec(
          "astyle --options=\".astylerc\" " +
          path.resolve(__dirname, "../../src") + "/*.cc " +
          path.resolve(__dirname, "../../include") + "/*.h"
        ).then(function() {
          // astyle leaves *.orig backups behind; remove them.
          return exec(
            "rm " +
            path.resolve(__dirname, "../../src") + "/*.cc.orig " +
            path.resolve(__dirname, "../../include") + "/*.h.orig "
          );
        });
      }
    }, function() {})
  }).catch(console.log);

};

// Allow running directly: `node generate/scripts/generate_native_code.js`.
if (require.main === module) {
  module.exports();
}

// ---- generate/scripts/helpers.js ----
// Matches libgit2 callback typedefs such as "git_checkout_notify_cb".
var callbackTypePattern = /\s*_cb/;

var utils = require("./utils");
var _ = require("lodash");
var path = require("path");
var fs = require("fs");

// TODO: When libgit2's docs include callbacks we should be able to remove this
var callbackDefs = require("../input/callbacks.json");
// Hand-maintained overrides applied on top of the parsed libgit2 docs.
var descriptor = require("../input/descriptor.json");
var libgit2 = require("../input/libgit2-docs.json");

var cTypes = libgit2.groups.map(function(group) { return group[0];});

// C scalar types and the V8 class used to expose them.
var cTypeMappings = {
  "char": "String",
  "short": "Number",
  "int": "Number",
  "int16_t": "Number",
  "int32_t": "Number",
  "int64_t": "Number",
  "size_t": "Number",
  "uint16_t": "Number",
  "uint32_t": "Number",
  "uint64_t": "Number"
}

// Generated names that would collide with JS reserved words/builtins.
var collisionMappings = {
  "new": "create"
}
// Decorates the raw libgit2 doc JSON (idefs) with the metadata the code
// generation templates need: C++/JS names, callback & payload wiring,
// constructor/prototype classification, and descriptor.json overrides.
var Helpers = {
  // Strip qualifiers, "struct" and pointer markers from a C type,
  // e.g. "const git_odb **" -> "git_odb". Note String.replace with a
  // string argument only removes the FIRST occurrence of each qualifier.
  normalizeCtype: function(cType) {
    return (cType || "")
      .toLowerCase()
      .replace("const ", "")
      .replace("unsigned ", "")
      .replace("struct", "")
      .replace(utils.doublePointerRegex, "")
      .replace(utils.pointerRegex, "")
      .trim();
  },

  // Map a C type to the C++ wrapper class name, stripping the owner type
  // prefix (e.g. "git_odb_backend" within owner "git_odb" -> "Backend").
  cTypeToCppName: function(cType, ownerType) {
    var normalizedType = Helpers.normalizeCtype(cType);
    if (ownerType && normalizedType != ownerType) {
      normalizedType = normalizedType.replace(ownerType, "");
    }

    return cTypeMappings[normalizedType] || utils.titleCase(normalizedType);
  },

  // Map a C type to the camelCased JS name, merging "from"/"by" prefixes
  // (e.g. "fromBuffer" stays intact) and expanding a trailing "Str".
  cTypeToJsName: function(cType, ownerType) {
    var output = utils.camelCase(Helpers.cTypeToCppName(cType, ownerType).replace(/^Git/, ""));
    var mergedPrefixes = ["from", "by"];

    mergedPrefixes.forEach(function(prefix) {
      var reg = new RegExp("(^" + prefix + "|" + utils.titleCase(prefix) + ")([a-z]+)$");
      output = output.replace(reg, function(all, prefixMatch, otherWord) {
        return prefixMatch + utils.titleCase(otherWord);
      });
    });

    output = output.replace(/([a-z])Str$/, "$1String");
    return output;
  },

  // True when fnName is the "init" function for cType, formed by inserting
  // "init" before the last underscore segment of cType.
  isConstructorFunction: function(cType, fnName) {
    var initFnName = cType.split('_');

    initFnName.splice(-1, 0, "init");
    initFnName = initFnName.join('_');

    return initFnName === fnName;
  },

  // True when the type's recorded usages include its init/constructor fn.
  hasConstructor: function(type, normalizedType) {
    return type.used
      && type.used.needs
      && type.used.needs.some(function (fnName) {
        return Helpers.isConstructorFunction(normalizedType, fnName);
      });
  },

  isCallbackFunction: function(cType) {
    return callbackTypePattern.test(cType);
  },

  // True when `payloadName` (a "*_payload" field) belongs to callback
  // field `cbField` (matched by shared name stem).
  isPayloadFor: function(cbField, payloadName) {
    return payloadName && ~payloadName.indexOf("_payload")
      && Helpers.isCallbackFunction(cbField.cType)
      && ~cbField.name.indexOf(payloadName.replace("_payload", ""));
  },

  // Look up `normalizedType` in the [name, def] pairs of `types`.
  getLibgitType: function(normalizedType, types) {
    var libgitType;

    types.some(function (type) {
      if (type[0] === normalizedType) {
        libgitType = type[1];
        return true;
      }
    });

    return libgitType;
  },

  // Merge the hand-written callback definition (callbacks.json) into a
  // callback-typed field/arg; warn during full builds when one is missing.
  processCallback: function(field) {
    field.isCallbackFunction = true;

    var callbackDef = callbackDefs[field.type] || callbackDefs[field.cType];
    if (callbackDef) {
      _.merge(field, callbackDef);
    }
    else {
      if (process.env.BUILD_ONLY) {
        console.warn("Couldn't find callback definition for " + field.type);
      }
    }
  },

  // Mark payload fields: a bare "payload" serves every callback ("*");
  // a "<name>_payload" field is attached to its matching callback field.
  processPayload: function(field, allFields) {
    if (field.name === "payload") {
      field.payloadFor = "*";
      field.globalPayload = true;
      field.isOptional = true;
    }
    else {
      var cbFieldName;

      allFields.some(function (cbField) {
        if (Helpers.isPayloadFor(cbField, field.name)) {
          cbFieldName = cbField.name;
          return true;
        }
      });

      if (cbFieldName) {
        field.payloadFor = cbFieldName;
        field.isOptional = true;
      }
    }
  },

  // Decorate `type` when it is a known libgit2 type; returns the libgit
  // type found in `types` (or undefined).
  decorateLibgitType: function(type, types, enums) {
    var normalizedType = Helpers.normalizeCtype(type.cType);
    var libgitType = Helpers.getLibgitType(normalizedType, types);

    if (libgitType) {
      type.isLibgitType = true;
      type.isEnum = libgitType.type === "enum";
      type.hasConstructor = Helpers.hasConstructor(type, normalizedType);

      // there are no enums at the struct level currently, but we still
      // need to override function args
      if (type.isEnum) {
        type.cppClassName = "Number";
        type.jsClassName = "Number";
        if (enums[type.cType]) {
          type.isMask = enums[type.cType].isMask || false;
        }
      }

      // we don't want to overwrite the c type of the passed in type
      _.merge(type, descriptor.types[normalizedType.replace("git_", "")] || {}, { cType: type.cType });
    }

    return libgitType;
  },

  // Decorate a top-level class/struct idef: names, fields, functions,
  // and descriptor overrides (fields/functions are merged per-item above,
  // so only the partial overrides are merged at the end).
  decoratePrimaryType: function(typeDef, enums) {
    var typeDefOverrides = descriptor.types[typeDef.typeName] || {};
    var partialOverrides = _.omit(typeDefOverrides, ["fields", "functions"]);

    typeDef.cType = typeDef.cType || null;
    typeDef.cppClassName = Helpers.cTypeToCppName(typeDef.cType || "git_" + typeDef.typeName);
    typeDef.jsClassName = utils.titleCase(Helpers.cTypeToJsName(typeDef.cType || "git_" + typeDef.typeName));
    typeDef.filename = typeDef.typeName;
    typeDef.isLibgitType = true;
    typeDef.dependencies = [];
    typeDef.selfFreeing = Boolean(typeDefOverrides.selfFreeing);

    if (typeDefOverrides.freeFunctionName) {
      typeDef.freeFunctionName = typeDefOverrides.freeFunctionName;
    } else if (typeDef.type === 'struct') {
      typeDef.freeFunctionName = 'free';
    }

    typeDef.fields = typeDef.fields || [];
    typeDef.fields.forEach(function (field, index, allFields) {
      var fieldOverrides = typeDefOverrides.fields || {};
      Helpers.decorateField(field, allFields, fieldOverrides[field.name] || {}, enums);
    });

    typeDef.needsForwardDeclaration = typeDef.decl === typeDef.cType;

    var normalizedType = Helpers.normalizeCtype(typeDef.cType);
    typeDef.hasConstructor = Helpers.hasConstructor(typeDef, normalizedType);

    // Resolve function names to their full definitions from the docs.
    typeDef.functions = (typeDef.functions).map(function(fn) {
      var fnDef = libgit2.functions[fn];
      fnDef.cFunctionName = fn;
      return fnDef;
    });

    // (fixed: `typeDefOverrides` was previously re-declared here with the
    // identical expression; the declaration at the top already applies.)
    var functionOverrides = typeDefOverrides.functions || {};
    typeDef.functions.forEach(function(fnDef) {
      Helpers.decorateFunction(fnDef, typeDef, functionOverrides[fnDef.cFunctionName] || {}, enums);
    });

    _.merge(typeDef, partialOverrides);
  },

  // Decorate a struct field: naming, callback/payload handling, libgit2
  // type info, then per-field descriptor overrides.
  decorateField: function(field, allFields, fieldOverrides, enums) {
    field.cType = field.type;
    field.cppFunctionName = utils.titleCase(field.name);
    field.jsFunctionName = utils.camelCase(field.name);
    field.cppClassName = Helpers.cTypeToCppName(field.type);
    field.jsClassName = utils.titleCase(Helpers.cTypeToJsName(field.type));
    field.ownedByThis = true;

    if (Helpers.isCallbackFunction(field.cType)) {
      Helpers.processCallback(field);

      var argOverrides = fieldOverrides.args || {};
      field.args = field.args || [];
      field.args.forEach(function (arg) {
        Helpers.decorateArg(arg, field.args, null, null, argOverrides[arg.name] || {}, enums);
      });
    }
    else {
      field.isCallbackFunction = false;
      Helpers.processPayload(field, allFields);
      // Payload fields are not exposed on their own; stop decorating.
      if (field.payloadFor) {
        return;
      }
    }

    Helpers.decorateLibgitType(field, libgit2.types, enums);
    _.merge(field, fieldOverrides);
  },

  // Decorate a function argument (or a callback's argument when
  // typeDef/fnDef are null). Classifies out-params, the receiver (`self`)
  // and constructor vs prototype methods.
  decorateArg: function(arg, allArgs, typeDef, fnDef, argOverrides, enums) {
    var type = argOverrides.cType || argOverrides.type || arg.cType || arg.type;
    var normalizedType = Helpers.normalizeCtype(type);

    arg.cType = type;
    arg.cppClassName = Helpers.cTypeToCppName(arg.cType);
    arg.jsClassName = utils.titleCase(Helpers.cTypeToJsName(arg.cType));

    Helpers.decorateLibgitType(arg, libgit2.types, enums);

    // Some arguments can be callbacks
    if (Helpers.isCallbackFunction(type)) {
      Helpers.processCallback(arg);

      var callBackArgOverrides = argOverrides.args || {};
      arg.args = arg.args || [];
      arg.args.forEach(function (argForCallback) {
        Helpers.decorateArg(argForCallback, arg.args, null, null, callBackArgOverrides[argForCallback.name] || {}, enums);
      });
    }
    else if (typeDef && fnDef) {
      Helpers.processPayload(arg, allArgs);
      if (arg.payloadFor) {
        return;
      }

      // Mark all of the args that are either returns or are the object
      // itself and determine if this function goes on the prototype
      // or is a constructor method.
      arg.isReturn = arg.name === "out" || (utils.isDoublePointer(arg.type) && normalizedType == typeDef.cType);
      if (typeof arg.isSelf == 'undefined') {
        // Only the first candidate becomes `self` (_.every over allArgs).
        arg.isSelf = utils.isPointer(arg.type) &&
          normalizedType == typeDef.cType &&
          arg.cppClassName !== "Array" &&
          argOverrides.cppClassName !== "Array" &&
          _.every(allArgs, function(_arg) { return !_arg.isSelf; });
      }
      // An int return alongside an out-param is a libgit2 error code,
      // which forces the generated wrapper to be async.
      if (arg.isReturn && fnDef.return && fnDef.return.type === "int") {
        fnDef.return.isErrorCode = true;
        fnDef.isAsync = true;
      }

      if (arg.isReturn && arg.isSelf) {
        arg.isSelf = false;
        fnDef.isConstructorMethod = true;
      }
      else if (arg.isSelf) {
        fnDef.isPrototypeMethod = true;
      }
    }

    _.merge(arg, argOverrides);
  },

  // Decorate a function definition: names, args, collision renames and
  // per-function descriptor overrides.
  decorateFunction: function(fnDef, typeDef, fnOverrides, enums) {
    var key = fnDef.cFunctionName;

    // if this is the free function for the class, record it on the class.
    // NOTE(review): the original comment said the free function should
    // also not be exposed, but no early return/skip is visible here —
    // confirm whether it is filtered elsewhere.
    if (key == typeDef.cType + "_free") {
      typeDef.freeFunctionName = key;
    }

    fnDef.cppFunctionName = Helpers.cTypeToCppName(key, "git_" + typeDef.typeName);
    fnDef.jsFunctionName = Helpers.cTypeToJsName(key, "git_" + typeDef.typeName);

    if (fnDef.cppFunctionName == typeDef.cppClassName) {
      fnDef.cppFunctionName = fnDef.cppFunctionName.replace("Git", "");
    }

    var argOverrides = fnOverrides.args || {};
    fnDef.args.forEach(function(arg) {
      Helpers.decorateArg(arg, fnDef.args, typeDef, fnDef, argOverrides[arg.name] || {}, enums);

      // if a function has any callbacks then it MUST be async
      if (arg.isCallbackFunction) {
        fnDef.isAsync = true;
      }
    });

    // A manual C implementation can be supplied via the descriptor.
    if (fnDef.cFile) {
      fnDef.implementation = fs.readFileSync(path.resolve(fnDef.cFile), 'utf8');
    }

    if (fnDef.return) {
      Helpers.decorateArg(fnDef.return, fnDef.args, typeDef, fnDef, fnOverrides.return || {}, enums);
    }

    // Rename functions that would collide with JS reserved words.
    _(collisionMappings).forEach(function(newName, collidingName) {
      if (fnDef.cppFunctionName == utils.titleCase(collidingName)) {
        fnDef.cppFunctionName = utils.titleCase(newName);
      }

      if (fnDef.jsFunctionName == utils.camelCase(collidingName)) {
        fnDef.jsFunctionName = utils.camelCase(newName);
      }
    });

    if ("git_" + typeDef.typeName == fnDef.cFunctionName) {
      fnDef.useAsOnRootProto = true;
    }
    _.merge(fnDef, _.omit(fnOverrides, "args", "return"));
  },

  // Remove ignored entries from `arr` in place (iterating backwards so
  // splicing is safe) and invoke `callback` on the survivors.
  filterIgnored: function (arr, callback) {
    if (!arr) {
      return;
    }
    for (var i = arr.length - 1; i >= 0; i--) {
      if (arr[i].ignore) {
        arr.splice(i, 1);
      }
      else if (callback) {
        callback(arr[i]);
      }
    }
  },

  // Strip documentation-only properties that the templates never use.
  deleteProperties: function(obj) {
    delete obj.line;
    delete obj.lineto;
    delete obj.block;
    delete obj.description;
    delete obj.comments;
    delete obj.tdef;
    delete obj.decl;
    delete obj.argline;
    delete obj.sig;
  },

  // Prune ignored idefs/fields/functions/args and their doc properties.
  filterDocumentation: function(idefs) {
    Helpers.filterIgnored(idefs, function (idef) {
      Helpers.deleteProperties(idef);

      Helpers.filterIgnored(idef.fields, Helpers.deleteProperties);


      Helpers.filterIgnored(idef.functions, function (fn) {
        Helpers.deleteProperties(fn);

        Helpers.filterIgnored(fn.args, function(arg) {
          Helpers.deleteProperties(arg);
          delete arg.functions;
        });
      });
    });
  }
};

module.exports = Helpers;
+var local = path.join.bind(null, __dirname, "../"); + +var util = { + pointerRegex: /\s*\*\s*/, + doublePointerRegex: /\s*\*\*\s*/, + + readFile: function(file) { + try { + return fs.readFileSync(local(file)).toString(); + } + catch (unhandledException) { + return ""; + } + }, + + writeFile: function(file, content, header) { + try { + var file = local(file); + if (typeof content == "object") { + content = JSON.stringify(content, null, 2) + } + + if (header) { + var commentPrefix = ~header.indexOf('.gyp') ? '#' : '//' + content = commentPrefix + + " This is a generated file, modify: generate/templates/" + + header + + "\n\n" + + content; + } + + fse.ensureFileSync(file); + fse.writeFileSync(file, content); + return true; + } + catch (exception) { + return false; + } + }, + + titleCase: function(str) { + return str.split(/_|\//).map(function(val, index) { + if (val.length) { + return val[0].toUpperCase() + val.slice(1); + } + + return val; + }).join(""); + }, + + camelCase: function(str) { + return str.split(/_|\//).map(function(val, index) { + return (index >= 1 + ? 
val[0].toUpperCase() + val.slice(1) + : val[0].toLowerCase() + val.slice(1)); + }).join(""); + }, + + isPointer: function(type) { + return util.pointerRegex.test(type) || util.doublePointerRegex.test(type); + }, + + isDoublePointer: function(type) { + return util.doublePointerRegex.test(type); + } + +}; + +module.exports = util; diff --git a/generate/templates/filters/and.js b/generate/templates/filters/and.js new file mode 100644 index 000000000..042631623 --- /dev/null +++ b/generate/templates/filters/and.js @@ -0,0 +1,3 @@ +module.exports = function(value, other) { + return value && other; +}; diff --git a/generate/templates/filters/args_info.js b/generate/templates/filters/args_info.js new file mode 100644 index 000000000..1382c67d6 --- /dev/null +++ b/generate/templates/filters/args_info.js @@ -0,0 +1,42 @@ +module.exports = function(args) { + var result = [], + cArg, + jsArg; + + for(cArg = 0, jsArg = 0; cArg < args.length; cArg++) { + var arg = args[cArg]; + + if (!arg.isReturn && !arg.isSelf) { + arg.isJsArg = true; + arg.jsArg = jsArg; + + jsArg++; + } + + if (cArg === args.length -1) { + arg.lastArg = true; + } + else { + arg.lastArg = false; + } + + arg.cArg = cArg; + arg.isCppClassStringOrArray = ~["String", "Array"].indexOf(arg.cppClassName); + arg.isConst = ~arg.cType.indexOf("const "); + + // if we have a callback then we also need the corresponding payload for that callback + if (arg.isCallbackFunction) { + var payload = args.filter(function(payload) { + return payload.payloadFor == arg.name || payload.payloadFor == '*'; + })[0]; + + if (payload) { + arg.payload = payload; + } + } + + result.push(arg); + } + + return result; +}; diff --git a/generate/templates/filters/cpp_to_v8.js b/generate/templates/filters/cpp_to_v8.js new file mode 100644 index 000000000..96b2b0ba6 --- /dev/null +++ b/generate/templates/filters/cpp_to_v8.js @@ -0,0 +1,11 @@ +var isV8Value = require("./is_v8_value"); + +module.exports = function(cppClassName) { + if (cppClassName 
=== "Integer") { + cppClassName = "Number"; + } else if (cppClassName === "Wrapper"){ + cppClassName = "String"; + } + + return isV8Value(cppClassName) ? cppClassName : "Object"; +}; diff --git a/generate/templates/filters/default_value.js b/generate/templates/filters/default_value.js new file mode 100644 index 000000000..42682549e --- /dev/null +++ b/generate/templates/filters/default_value.js @@ -0,0 +1,3 @@ +module.exports = function(cType) { + return cType === "git_otype" ? "GIT_OBJ_ANY" : "0"; +}; diff --git a/generate/templates/filters/fields_info.js b/generate/templates/filters/fields_info.js new file mode 100644 index 000000000..8ec8a930b --- /dev/null +++ b/generate/templates/filters/fields_info.js @@ -0,0 +1,17 @@ +module.exports = function(fields) { + var result = []; + + fields.forEach(function (field){ + var fieldInfo = {}; + + fieldInfo.__proto__ = field; + + fieldInfo.parsedName = field.name || "result"; + fieldInfo.isCppClassIntType = ~["Uint32", "Int32"].indexOf(field.cppClassName); + fieldInfo.parsedClassName = (field.cppClassName || '').toLowerCase() + "_t"; + + result.push(fieldInfo); + }); + + return result; +}; diff --git a/generate/templates/filters/has_return_type.js b/generate/templates/filters/has_return_type.js new file mode 100644 index 000000000..2d21a0e66 --- /dev/null +++ b/generate/templates/filters/has_return_type.js @@ -0,0 +1,7 @@ +module.exports = function(functionInfo) { + if (functionInfo.return) { + return functionInfo.return.cType != "void" || functionInfo.return.isErrorCode; + } + + return false; +}; diff --git a/generate/templates/filters/has_return_value.js b/generate/templates/filters/has_return_value.js new file mode 100644 index 000000000..bbe371808 --- /dev/null +++ b/generate/templates/filters/has_return_value.js @@ -0,0 +1,9 @@ +module.exports = function(fn) { + if (fn.return + && !fn.return.isErrorCode + && fn.return.cType != "void") { + return true + } + + return false; +}; diff --git 
a/generate/templates/filters/is_double_pointer.js b/generate/templates/filters/is_double_pointer.js new file mode 100644 index 000000000..9d1246ab4 --- /dev/null +++ b/generate/templates/filters/is_double_pointer.js @@ -0,0 +1,3 @@ +module.exports = function(cType) { + return /\s*\*\*\s*/.test(cType); +}; diff --git a/generate/templates/filters/is_fixed_length_string.js b/generate/templates/filters/is_fixed_length_string.js new file mode 100644 index 000000000..a2baa43bf --- /dev/null +++ b/generate/templates/filters/is_fixed_length_string.js @@ -0,0 +1,3 @@ +module.exports = function(field) { + return field.cppClassName == "String" && field.size; +}; diff --git a/generate/templates/filters/is_oid.js b/generate/templates/filters/is_oid.js new file mode 100644 index 000000000..1d13f700f --- /dev/null +++ b/generate/templates/filters/is_oid.js @@ -0,0 +1,3 @@ +module.exports = function(arg) { + return arg.cppClassName == "GitOid" && !arg.isSelf && !arg.isReturn; +}; diff --git a/generate/templates/filters/is_payload.js b/generate/templates/filters/is_payload.js new file mode 100644 index 000000000..510cec95e --- /dev/null +++ b/generate/templates/filters/is_payload.js @@ -0,0 +1,3 @@ +module.exports = function(field) { + return field.name == 'payload' || field.payload || field.payloadFor; +}; diff --git a/generate/templates/filters/is_pointer.js b/generate/templates/filters/is_pointer.js new file mode 100644 index 000000000..a5dbb053e --- /dev/null +++ b/generate/templates/filters/is_pointer.js @@ -0,0 +1,3 @@ +module.exports = function(cType) { + return /\s*\*\s*/.test(cType); +}; diff --git a/generate/templates/filters/is_v8_value.js b/generate/templates/filters/is_v8_value.js new file mode 100644 index 000000000..f773ff6a2 --- /dev/null +++ b/generate/templates/filters/is_v8_value.js @@ -0,0 +1,14 @@ +var v8 = [ + "Boolean", + "Number", + "String", + "Integer", + "Int32", + "Uint32", + "Date", + "Function" +]; + +module.exports = function(cppClassName) { + return 
v8.indexOf(cppClassName) > -1; +}; diff --git a/generate/templates/filters/js_args_count.js b/generate/templates/filters/js_args_count.js new file mode 100644 index 000000000..5be437f41 --- /dev/null +++ b/generate/templates/filters/js_args_count.js @@ -0,0 +1,18 @@ +module.exports = function(args) { + var cArg, + jsArg; + + if (!args) { + return 0; + } + + for(cArg = 0, jsArg = 0; cArg < args.length; cArg++) { + var arg = args[cArg]; + + if (!arg.isReturn && !arg.isSelf && !arg.isPayload) { + jsArg++; + } + } + + return jsArg; +}; diff --git a/generate/templates/filters/or.js b/generate/templates/filters/or.js new file mode 100644 index 000000000..fe02c2a87 --- /dev/null +++ b/generate/templates/filters/or.js @@ -0,0 +1,3 @@ +module.exports = function(value, other) { + return value || other; +}; diff --git a/generate/templates/filters/payload_for.js b/generate/templates/filters/payload_for.js new file mode 100644 index 000000000..412cf6123 --- /dev/null +++ b/generate/templates/filters/payload_for.js @@ -0,0 +1,14 @@ +module.exports = function(fields, payloadForField) { + fields = fields || []; + + var result = fields.filter(function (field) { + return field.payloadFor && (field.payloadFor === payloadForField || field.payloadFor === "*"); + }); + + if (result.length > 0) { + return result[0].name; + } + else { + return ""; + } +}; diff --git a/generate/templates/filters/replace.js b/generate/templates/filters/replace.js new file mode 100644 index 000000000..15ed9c71f --- /dev/null +++ b/generate/templates/filters/replace.js @@ -0,0 +1,3 @@ +module.exports = function(value, find, replace) { + return value.replace(find, replace); +}; diff --git a/generate/templates/filters/returns_count.js b/generate/templates/filters/returns_count.js new file mode 100644 index 000000000..9ba92aec4 --- /dev/null +++ b/generate/templates/filters/returns_count.js @@ -0,0 +1,15 @@ +module.exports = function(fn) { + var args = fn.args || []; + var result = args.reduce(function 
(currentValue, arg) { + return currentValue + (arg.isReturn ? 1 : 0); + }, 0); + + if (!result + && fn.return + && !fn.return.isErrorCode + && fn.return.cType != "void") { + result = 1; + } + + return result; +}; diff --git a/generate/templates/filters/returns_info.js b/generate/templates/filters/returns_info.js new file mode 100644 index 000000000..7eaf26e7a --- /dev/null +++ b/generate/templates/filters/returns_info.js @@ -0,0 +1,47 @@ +var isPointer = require("./is_pointer"); + +module.exports = function(fn, argReturnsOnly, isAsync) { + var result = []; + var args = fn.args || []; + + args.forEach(function (arg) { + if (!arg.isReturn) return; + + var return_info = {}; + + return_info.__proto__ = arg; + + return_info.parsedName = isAsync ? "baton->" + return_info.name : return_info.name; + return_info.isCppClassIntType = ~['Uint32', 'Int32'].indexOf(return_info.cppClassName); + return_info.needsDereference + = isAsync && + return_info.cppClassName == "Number" && + isPointer(return_info.cType); + return_info.parsedClassName = (return_info.cppClassName || '').toLowerCase() + "_t"; + return_info.returnNameOrName = return_info.returnName || return_info.name; + return_info.jsOrCppClassName = return_info.jsClassName || return_info.cppClassName; + return_info.isOutParam = true; + + result.push(return_info); + }); + + if (!result.length + && !argReturnsOnly + && fn.return + && !fn.return.isErrorCode + && fn.return.cType != "void") { + var return_info = {}; + + return_info.__proto__ = fn.return; + return_info.parsedName = return_info.name && isAsync ? 
"baton->" + return_info.name : "result"; + return_info.isCppClassIntType = ~['Uint32', 'Int32'].indexOf(return_info.cppClassName); + return_info.parsedClassName = (return_info.cppClassName || '').toLowerCase() + "_t"; + return_info.returnNameOrName = return_info.returnName || return_info.name; + return_info.jsOrCppClassName = return_info.jsClassName || return_info.cppClassName; + + result.push(return_info); + } + + + return result; +}; diff --git a/generate/templates/filters/title_case.js b/generate/templates/filters/title_case.js new file mode 100644 index 000000000..8c22b32bb --- /dev/null +++ b/generate/templates/filters/title_case.js @@ -0,0 +1,9 @@ +module.exports = function(str) { + return str.split(/_|\//).map(function(val, index) { + if (val.length) { + return val[0].toUpperCase() + val.slice(1); + } + + return val; + }).join(""); +}; diff --git a/generate/templates/filters/to_bool.js b/generate/templates/filters/to_bool.js new file mode 100644 index 000000000..dd6063f28 --- /dev/null +++ b/generate/templates/filters/to_bool.js @@ -0,0 +1,3 @@ +module.exports = function(value) { + return !!value; +}; diff --git a/generate/templates/filters/un_pointer.js b/generate/templates/filters/un_pointer.js new file mode 100644 index 000000000..99382f36f --- /dev/null +++ b/generate/templates/filters/un_pointer.js @@ -0,0 +1,3 @@ +module.exports = function(cType) { + return cType.replace(/\s*\*\s*$/, ""); +}; diff --git a/generate/templates/filters/upper.js b/generate/templates/filters/upper.js new file mode 100644 index 000000000..bf0aa534b --- /dev/null +++ b/generate/templates/filters/upper.js @@ -0,0 +1,3 @@ +module.exports = function(value) { + return value.toUpperCase(); +}; diff --git a/generate/templates/manual/include/async_baton.h b/generate/templates/manual/include/async_baton.h new file mode 100644 index 000000000..fee87c4c1 --- /dev/null +++ b/generate/templates/manual/include/async_baton.h @@ -0,0 +1,48 @@ +#ifndef ASYNC_BATON +#define ASYNC_BATON + 
+#include +#include + +#include "lock_master.h" +#include "functions/sleep_for_ms.h" + +// Base class for Batons used for callbacks (for example, +// JS functions passed as callback parameters, +// or field properties of configuration objects whose values are callbacks) +struct AsyncBaton { + uv_async_t req; + + bool done; +}; + +template +struct AsyncBatonWithResult : public AsyncBaton { + ResultT result; + ResultT defaultResult; // result returned if the callback doesn't return anything valid + + AsyncBatonWithResult(const ResultT &defaultResult) + : defaultResult(defaultResult) { + } + + ResultT ExecuteAsync(uv_async_cb asyncCallback) { + result = 0; + req.data = this; + done = false; + + uv_async_init(uv_default_loop(), &req, asyncCallback); + { + LockMaster::TemporaryUnlock temporaryUnlock; + + uv_async_send(&req); + + while(!done) { + sleep_for_ms(1); + } + } + + return result; + } +}; + +#endif diff --git a/generate/templates/manual/include/async_libgit2_queue_worker.h b/generate/templates/manual/include/async_libgit2_queue_worker.h new file mode 100644 index 000000000..f3ddf2fb3 --- /dev/null +++ b/generate/templates/manual/include/async_libgit2_queue_worker.h @@ -0,0 +1,33 @@ +#ifndef ASYNC_LIBGIT2_QUEUE_WORKER_H +#define ASYNC_LIBGIT2_QUEUE_WORKER_H + +#include +#include +#include "../include/thread_pool.h" +#include "../include/nodegit.h" + + +// Runs WorkComplete of the scheduled AsyncWorker, +// and destroys it. This is run in the uv_default_loop event loop. 
+NAN_INLINE void AsyncLibgit2Complete (void* data) { + Nan::AsyncWorker *worker = static_cast(data); + worker->WorkComplete(); + worker->Destroy(); +} + +// Runs Execute of the scheduled AyncWorker on the dedicated libgit2 thread / +// event loop, and schedules the WorkComplete callback to run on the +// uv_default_loop event loop +NAN_INLINE void AsyncLibgit2Execute (void *vworker) { + // execute the worker + Nan::AsyncWorker *worker = static_cast(vworker); + worker->Execute(); +} + +// Schedules the AsyncWorker to run on the dedicated libgit2 thread / event loop, +// and on completion AsyncLibgit2Complete on the default loop +NAN_INLINE void AsyncLibgit2QueueWorker (Nan::AsyncWorker* worker) { + libgit2ThreadPool.QueueWork(AsyncLibgit2Execute, AsyncLibgit2Complete, worker); +} + +#endif diff --git a/generate/templates/manual/include/callback_wrapper.h b/generate/templates/manual/include/callback_wrapper.h new file mode 100644 index 000000000..b23a7bb36 --- /dev/null +++ b/generate/templates/manual/include/callback_wrapper.h @@ -0,0 +1,60 @@ +#ifndef CALLBACK_WRAPPER_H +#define CALLBACK_WRAPPER_H + +#include +#include + +using namespace v8; +using namespace node; + +class CallbackWrapper { + Nan::Callback* jsCallback; + + // throttling data, used for callbacks that need to be throttled + int throttle; // in milliseconds - if > 0, calls to the JS callback will be throttled + uint64_t lastCallTime; + +public: + CallbackWrapper() { + jsCallback = NULL; + lastCallTime = 0; + throttle = 0; + } + + ~CallbackWrapper() { + SetCallback(NULL); + } + + bool HasCallback() { + return jsCallback != NULL; + } + + Nan::Callback* GetCallback() { + return jsCallback; + } + + void SetCallback(Nan::Callback* callback, int throttle = 0) { + if(jsCallback) { + delete jsCallback; + } + jsCallback = callback; + this->throttle = throttle; + } + + bool WillBeThrottled() { + if(!throttle) { + return false; + } + // throttle if needed + uint64_t now = uv_hrtime(); + if(lastCallTime > 0 && 
now < lastCallTime + throttle * 1000000) { + // throttled + return true; + } else { + lastCallTime = now; + return false; + } + } +}; + +#endif diff --git a/generate/templates/manual/include/convenient_hunk.h b/generate/templates/manual/include/convenient_hunk.h new file mode 100644 index 000000000..63d77d900 --- /dev/null +++ b/generate/templates/manual/include/convenient_hunk.h @@ -0,0 +1,75 @@ +#ifndef CONVENIENTHUNK_H +#define CONVENIENTHUNK_H +// generated from class_header.h +#include +#include + +#include "async_baton.h" +#include "promise_completion.h" + +extern "C" { +#include +} + +#include "../include/typedefs.h" + +struct HunkData { + git_diff_hunk hunk; + std::vector *lines; + size_t numLines; +}; + +void HunkDataFree(HunkData *hunk); + +using namespace node; +using namespace v8; + +class ConvenientHunk : public Nan::ObjectWrap { + public: + static Nan::Persistent constructor_template; + static void InitializeComponent (Local target); + + static Local New(void *raw); + + HunkData *GetValue(); + char *GetHeader(); + size_t GetSize(); + + private: + ConvenientHunk(HunkData *hunk); + ~ConvenientHunk(); + + HunkData *hunk; + + static NAN_METHOD(JSNewFunction); + static NAN_METHOD(Size); + + static NAN_METHOD(OldStart); + static NAN_METHOD(OldLines); + static NAN_METHOD(NewStart); + static NAN_METHOD(NewLines); + static NAN_METHOD(HeaderLen); + static NAN_METHOD(Header); + + struct LinesBaton { + HunkData *hunk; + std::vector *lines; + }; + class LinesWorker : public Nan::AsyncWorker { + public: + LinesWorker( + LinesBaton *_baton, + Nan::Callback *callback + ) : Nan::AsyncWorker(callback) + , baton(_baton) {}; + ~LinesWorker() {}; + void Execute(); + void HandleOKCallback(); + + private: + LinesBaton *baton; + }; + static NAN_METHOD(Lines); +}; + +#endif diff --git a/generate/templates/manual/include/convenient_patch.h b/generate/templates/manual/include/convenient_patch.h new file mode 100644 index 000000000..d6f6c69a1 --- /dev/null +++ 
b/generate/templates/manual/include/convenient_patch.h @@ -0,0 +1,108 @@ +#ifndef CONVENIENTPATCH_H +#define CONVENIENTPATCH_H +// generated from class_header.h +#include +#include + +#include "async_baton.h" +#include "promise_completion.h" + +extern "C" { +#include +} + +#include "../include/typedefs.h" +#include "../include/convenient_hunk.h" + +struct ConvenientLineStats { + size_t context; + size_t additions; + size_t deletions; +}; + +struct PatchData { + ConvenientLineStats lineStats; + git_delta_t status; + git_diff_file new_file; + git_diff_file old_file; + std::vector *hunks; + size_t numHunks; +}; + +PatchData *createFromRaw(git_patch *raw); +void PatchDataFree(PatchData *patch); + +using namespace node; +using namespace v8; + +class ConvenientPatch : public Nan::ObjectWrap { + public: + static Nan::Persistent constructor_template; + static void InitializeComponent (Local target); + + static Local New(void *raw); + + ConvenientLineStats GetLineStats(); + git_delta_t GetStatus(); + git_diff_file GetOldFile(); + git_diff_file GetNewFile(); + size_t GetNumHunks(); + PatchData *GetValue(); + + private: + ConvenientPatch(PatchData *raw); + ~ConvenientPatch(); + + PatchData *patch; + + static NAN_METHOD(JSNewFunction); + + // patch methods + static NAN_METHOD(LineStats); + + // hunk methods + static NAN_METHOD(Size); + + struct HunksBaton { + PatchData *patch; + std::vector *hunks; + }; + class HunksWorker : public Nan::AsyncWorker { + public: + HunksWorker( + HunksBaton *_baton, + Nan::Callback *callback + ) : Nan::AsyncWorker(callback) + , baton(_baton) {}; + ~HunksWorker() {}; + void Execute(); + void HandleOKCallback(); + + private: + HunksBaton *baton; + }; + + static NAN_METHOD(Hunks); + + // delta methods + static NAN_METHOD(OldFile); + static NAN_METHOD(NewFile); + + // convenient status methods + static NAN_METHOD(Status); + static NAN_METHOD(IsUnmodified); + static NAN_METHOD(IsAdded); + static NAN_METHOD(IsDeleted); + static NAN_METHOD(IsModified); 
+ static NAN_METHOD(IsRenamed); + static NAN_METHOD(IsCopied); + static NAN_METHOD(IsIgnored); + static NAN_METHOD(IsUntracked); + static NAN_METHOD(IsTypeChange); + static NAN_METHOD(IsUnreadable); + static NAN_METHOD(IsConflicted); + + // Hunk methods +}; + +#endif diff --git a/include/functions/copy.h b/generate/templates/manual/include/functions/copy.h similarity index 78% rename from include/functions/copy.h rename to generate/templates/manual/include/functions/copy.h index 9cc8659ef..f7f942475 100644 --- a/include/functions/copy.h +++ b/generate/templates/manual/include/functions/copy.h @@ -14,6 +14,8 @@ const git_index_time *git_index_time_dup(const git_index_time *arg); const git_time *git_time_dup(const git_time *arg); const git_diff_delta *git_diff_delta_dup(const git_diff_delta *arg); const git_diff_file *git_diff_file_dup(const git_diff_file *arg); -const git_diff_range *git_diff_range_dup(const git_diff_range *arg); + +void git_time_dup(git_time **out, const git_time *arg); +void git_transfer_progress_dup(git_transfer_progress **out, const git_transfer_progress *arg); #endif diff --git a/generate/templates/manual/include/functions/sleep_for_ms.h b/generate/templates/manual/include/functions/sleep_for_ms.h new file mode 100644 index 000000000..903299268 --- /dev/null +++ b/generate/templates/manual/include/functions/sleep_for_ms.h @@ -0,0 +1,6 @@ +#ifndef SLEEP_FOR_MS_H +#define SLEEP_FOR_MS_H + +void sleep_for_ms(int milliseconds); + +#endif diff --git a/generate/templates/manual/include/git_buf_converter.h b/generate/templates/manual/include/git_buf_converter.h new file mode 100644 index 000000000..e2ea08bba --- /dev/null +++ b/generate/templates/manual/include/git_buf_converter.h @@ -0,0 +1,16 @@ +#ifndef STR_ARRAY_H +#define STR_ARRAY_H + +#include + +#include "nan.h" +#include "git2/strarray.h" + +using namespace v8; + +class StrArrayConverter { + public: + static git_strarray *Convert (Local val); +}; + +#endif diff --git 
a/generate/templates/manual/include/init_ssh2.h b/generate/templates/manual/include/init_ssh2.h new file mode 100644 index 000000000..d7f92b71a --- /dev/null +++ b/generate/templates/manual/include/init_ssh2.h @@ -0,0 +1,6 @@ +#ifndef INIT_SSH2 +#define INIT_SSH2 + +void init_ssh2(); + +#endif diff --git a/generate/templates/manual/include/lock_master.h b/generate/templates/manual/include/lock_master.h new file mode 100644 index 000000000..fde38825b --- /dev/null +++ b/generate/templates/manual/include/lock_master.h @@ -0,0 +1,201 @@ +#ifndef LOCK_MASTER_H +#define LOCK_MASTER_H + +#include + +class LockMasterImpl; + +class LockMaster { +public: + enum Status { + Disabled = 0, + EnabledForAsyncOnly, + Enabled + }; + +private: + static Status status; + + LockMasterImpl *impl; + + template + void AddLocks(const T *t) { + // by default, don't lock anything + } + + // base case for variadic template unwinding + void AddParameters() { + } + + // processes a single parameter, then calls recursively on the rest + template + void AddParameters(const T *t, const Types*... args) { + if(t) { + AddLocks(t); + } + AddParameters(args...); + } + + void ConstructorImpl(); + void DestructorImpl(); + void ObjectToLock(const void *); + void ObjectsToLockAdded(); +public: + + // we lock on construction + template LockMaster(bool asyncAction, const Types*... types) { + if((status == Disabled) || ((status == EnabledForAsyncOnly) && !asyncAction)) { + impl = NULL; + return; + } + + ConstructorImpl(); + AddParameters(types...); + ObjectsToLockAdded(); + } + + // and unlock on destruction + ~LockMaster() { + if(!impl) { + return; + } + DestructorImpl(); + } + + // TemporaryUnlock unlocks the LockMaster currently registered on the thread, + // and re-locks it on destruction. 
+ class TemporaryUnlock { + LockMasterImpl *impl; + + void ConstructorImpl(); + void DestructorImpl(); + public: + TemporaryUnlock() { + // We can't return here if disabled + // It's possible that a LockMaster was fully constructed and registered + // before the thread safety was disabled. + // So we rely on ConstructorImpl to abort if there is no registered LockMaster + ConstructorImpl(); + } + ~TemporaryUnlock() { + if(!impl) { + return; + } + DestructorImpl(); + } + }; + + static void Initialize(); + + // Enables the thread safety system + static void Enable() { + status = Enabled; + } + + static void SetStatus(Status status) { + LockMaster::status = status; + } + + static void Disable() { + status = Disabled; + } + + static Status GetStatus() { + return status; + } + + // Diagnostic information that can be provided to the JavaScript layer + // for a minimal level of testing + struct Diagnostics { + // this counts all stored mutexes - even if they are unlocked: + int storedMutexesCount; + }; + + static Diagnostics GetDiagnostics(); +}; + + +template<> inline void LockMaster::AddLocks(const git_repository *repo) { + // when using a repo, lock the repo + ObjectToLock(repo); +} + +template<> inline void LockMaster::AddLocks(const git_index *index) { + // when using an index, lock the repo, or if there isn't one lock the index + const void *owner = git_index_owner(index); + if(!owner) { + owner = index; + } + ObjectToLock(owner); +} + +template<> inline void LockMaster::AddLocks(const git_commit *commit) { + // when using a commit, lock the repo + const void *owner = git_commit_owner(commit); + ObjectToLock(owner); +} + +// ... more locking rules would go here. 
According to an analysis of idefs.json, +// the following types are passed as non-const * and may require locking +// (some likely, some probably not): +// 'git_annotated_commit', +// 'git_blame_options', +// 'git_blob', +// 'git_buf', +// 'git_checkout_options', +// 'git_cherrypick_options', +// 'git_clone_options', +// 'git_commit', +// 'git_config', +// 'git_diff', +// 'git_diff_perfdata', +// 'git_error', +// 'git_fetch_options', +// 'git_fetch_options', +// 'git_filter', +// 'git_filter_list', +// 'git_hashsig', +// 'git_index', +// 'git_merge_file_input', +// 'git_merge_options', +// 'git_merge_options', +// 'git_note', +// 'git_note_iterator', +// 'git_object', +// 'git_odb', +// 'git_odb_object', +// 'git_oid', +// 'git_oidarray', +// 'git_packbuilder', +// 'git_patch', +// 'git_pathspec', +// 'git_push_options', +// 'git_rebase', +// 'git_rebase_options', +// 'git_refdb', +// 'git_reference', +// 'git_reflog', +// 'git_remote', +// 'git_remote_callbacks', +// 'git_remote_callbacks', +// 'git_repository', +// 'git_repository_init_options', +// 'git_revwalk', +// 'git_signature', +// 'git_stash_apply_options', +// 'git_status_list', +// 'git_strarray', +// 'git_submodule', +// 'git_submodule_update_options', +// 'git_tag', +// 'git_transfer_progress', +// 'git_transport', +// 'git_tree', +// 'git_treebuilder', +// 'git_writestream' +// +// Other types are always passed as const * and perhaps don't require locking +// (it's not a guarantee though) + + +#endif diff --git a/generate/templates/manual/include/nodegit.h b/generate/templates/manual/include/nodegit.h new file mode 100644 index 000000000..a9cef2950 --- /dev/null +++ b/generate/templates/manual/include/nodegit.h @@ -0,0 +1,15 @@ +#ifndef NODEGIT_H +#define NODEGIT_H + +#include "thread_pool.h" + +extern ThreadPool libgit2ThreadPool; + +v8::Local GetPrivate(v8::Local object, + v8::Local key); + +void SetPrivate(v8::Local object, + v8::Local key, + v8::Local value); + +#endif diff --git 
a/generate/templates/manual/include/nodegit_wrapper.h b/generate/templates/manual/include/nodegit_wrapper.h new file mode 100644 index 000000000..ea5277a50 --- /dev/null +++ b/generate/templates/manual/include/nodegit_wrapper.h @@ -0,0 +1,64 @@ +#ifndef NODEGIT_WRAPPER_H +#define NODEGIT_WRAPPER_H + +#include + +// the Traits template parameter supplies: +// typename cppClass - the C++ type of the NodeGit wrapper (e.g. GitRepository) +// typename cType - the C type of the libgit2 object being wrapped (e.g. git_repository) +// +// static const bool isDuplicable +// static void duplicate(cType **dest, cType *src) - duplicates src using dupFunction or cpyFunction +// +// static const bool isFreeable +// static void free(cType *raw) - frees the object using freeFunctionName + +template +class NodeGitWrapper : public Nan::ObjectWrap { +public: + // replicate Traits typedefs for ease of use + typedef typename Traits::cType cType; + typedef typename Traits::cppClass cppClass; + + // whether raw should be freed on destruction + // TODO: this should be protected but we have a few use cases that change this to + // false from the outside. I suspect it gets turned to false to avoid + // double-free problems in cases like when we pass cred objects to libgit2 + // and it frees them. We should probably be NULLing raw in that case + // (and through a method) instead of changing selfFreeing, but that's + // a separate issue. + bool selfFreeing; +protected: + cType *raw; + + // owner of the object, in the memory management sense. only populated + // when using ownedByThis, and the type doesn't have a dupFunction + // CopyablePersistentTraits are used to get the reset-on-destruct behavior. 
+ Nan::Persistent > owner; + + static Nan::Persistent constructor_template; + + // diagnostic count of self-freeing object instances + static int SelfFreeingInstanceCount; + // diagnostic count of constructed non-self-freeing object instances + static int NonSelfFreeingConstructedCount; + + static void InitializeTemplate(v8::Local &tpl); + + NodeGitWrapper(cType *raw, bool selfFreeing, v8::Local owner); + NodeGitWrapper(const char *error); // calls ThrowError + ~NodeGitWrapper(); + + static NAN_METHOD(JSNewFunction); + + static NAN_METHOD(GetSelfFreeingInstanceCount); + static NAN_METHOD(GetNonSelfFreeingConstructedCount); + +public: + static v8::Local New(const cType *raw, bool selfFreeing, v8::Local owner = v8::Local()); + + cType *GetValue(); + void ClearValue(); +}; + +#endif diff --git a/generate/templates/manual/include/promise_completion.h b/generate/templates/manual/include/promise_completion.h new file mode 100644 index 000000000..600fc0617 --- /dev/null +++ b/generate/templates/manual/include/promise_completion.h @@ -0,0 +1,46 @@ +#ifndef PROMISE_COMPLETION +#define PROMISE_COMPLETION + +#include + +#include "async_baton.h" + +// PromiseCompletion forwards either the resolved result or the rejection reason +// to the native layer, once the promise completes +// +// inherits ObjectWrap so it can be used in v8 and managed by the garbage collector +// it isn't wired up to be instantiated or accessed from the JS layer other than +// for the purpose of promise result forwarding +class PromiseCompletion : public Nan::ObjectWrap +{ + // callback type called when a promise completes + typedef void (*Callback) (bool isFulfilled, AsyncBaton *baton, v8::Local resultOfPromise); + + static NAN_METHOD(New); + static NAN_METHOD(PromiseFulfilled); + static NAN_METHOD(PromiseRejected); + + // persistent handles for NAN_METHODs + static Nan::Persistent newFn; + static Nan::Persistent promiseFulfilled; + static Nan::Persistent promiseRejected; + + static v8::Local 
Bind(Nan::Persistent &method, v8::Local object); + static void CallCallback(bool isFulfilled, const Nan::FunctionCallbackInfo &info); + + // callback and baton stored for the promise that this PromiseCompletion is + // attached to. when the promise completes, the callback will be called with + // the result, and the stored baton. + Callback callback; + AsyncBaton *baton; + + void Setup(v8::Local thenFn, v8::Local result, AsyncBaton *baton, Callback callback); +public: + // If result is a promise, this will instantiate a new PromiseCompletion + // and have it forward the promise result / reason via the baton and callback + static bool ForwardIfPromise(v8::Local result, AsyncBaton *baton, Callback callback); + + static void InitializeComponent(); +}; + +#endif diff --git a/generate/templates/manual/include/str_array_converter.h b/generate/templates/manual/include/str_array_converter.h new file mode 100644 index 000000000..c3509f128 --- /dev/null +++ b/generate/templates/manual/include/str_array_converter.h @@ -0,0 +1,23 @@ +#ifndef STR_ARRAY_H +#define STR_ARRAY_H + +#include + +#include "nan.h" +#include "git2/strarray.h" + +using namespace v8; + +class StrArrayConverter { + public: + + static git_strarray *Convert (Local val); + + private: + static git_strarray *ConvertArray(Array *val); + static git_strarray *ConvertString(Local val); + static git_strarray *AllocStrArray(const size_t count); + static git_strarray *ConstructStrArray(int argc, char** argv); +}; + +#endif diff --git a/generate/templates/manual/include/thread_pool.h b/generate/templates/manual/include/thread_pool.h new file mode 100644 index 000000000..6d29b3ddf --- /dev/null +++ b/generate/templates/manual/include/thread_pool.h @@ -0,0 +1,45 @@ +#ifndef THREAD_POOL_H +#define THREAD_POOL_H + +#include +#include + +class ThreadPool { + typedef void (*Callback) (void *); + struct Work { + Callback workCallback; + Callback loopCallback; + void *data; + + Work(Callback workCallback, Callback 
loopCallback, void *data) + : workCallback(workCallback), loopCallback(loopCallback), data(data) { + } + }; + + // work to be performed on the threadpool + std::queue workQueue; + uv_mutex_t workMutex; + uv_sem_t workSemaphore; + int workInProgressCount; + + // completion callbacks to be performed on the loop + std::queue loopQueue; + uv_mutex_t loopMutex; + uv_async_t loopAsync; + + static void RunEventQueue(void *threadPool); + void RunEventQueue(); + static void RunLoopCallbacks(uv_async_t* handle); + void RunLoopCallbacks(); +public: + // Initializes thread pool and spins up the requested number of threads + // The provided loop will be used for completion callbacks, whenever + // queued work is completed + ThreadPool(int numberOfThreads, uv_loop_t *loop); + // Queues work on the thread pool, followed by completion call scheduled + // on the loop provided in the constructor. + // QueueWork should be called on the loop provided in the constructor. + void QueueWork(Callback workCallback, Callback loopCallback, void *data); +}; + +#endif diff --git a/generate/templates/manual/include/typedefs.h b/generate/templates/manual/include/typedefs.h new file mode 100644 index 000000000..a1fe6311c --- /dev/null +++ b/generate/templates/manual/include/typedefs.h @@ -0,0 +1,8 @@ +#ifndef TYPEDEFS_H +#define TYPEDEFS_H + +#include "git2/submodule.h" + +typedef int (*git_submodule_cb)(git_submodule *sm, const char *name, void *payload); + +#endif diff --git a/generate/templates/manual/include/wrapper.h b/generate/templates/manual/include/wrapper.h new file mode 100644 index 000000000..c040ea64d --- /dev/null +++ b/generate/templates/manual/include/wrapper.h @@ -0,0 +1,34 @@ +/** + * This code is auto-generated; unless you know what you're doing, do not modify! 
+ **/ + +#ifndef WRAPPER_H +#define WRAPPER_H + +#include +#include + +#include "nan.h" + +using namespace node; +using namespace v8; + +class Wrapper : public Nan::ObjectWrap { + public: + + static Nan::Persistent constructor_template; + static void InitializeComponent (Local target); + + void *GetValue(); + static Local New(const void *raw); + + private: + Wrapper(void *raw); + + static NAN_METHOD(JSNewFunction); + static NAN_METHOD(ToBuffer); + + void *raw; +}; + +#endif diff --git a/generate/templates/manual/patches/convenient_patches.cc b/generate/templates/manual/patches/convenient_patches.cc new file mode 100644 index 000000000..79823e37a --- /dev/null +++ b/generate/templates/manual/patches/convenient_patches.cc @@ -0,0 +1,124 @@ +NAN_METHOD(GitPatch::ConvenientFromDiff) { + if (info.Length() == 0 || !info[0]->IsObject()) { + return Nan::ThrowError("Diff diff is required."); + } + + if (info.Length() == 1 || !info[1]->IsFunction()) { + return Nan::ThrowError("Callback is required and must be a Function."); + } + + ConvenientFromDiffBaton *baton = new ConvenientFromDiffBaton; + + baton->error_code = GIT_OK; + baton->error = NULL; + + baton->diff = Nan::ObjectWrap::Unwrap(info[0]->ToObject())->GetValue(); + baton->out = new std::vector; + baton->out->reserve(git_diff_num_deltas(baton->diff)); + + Nan::Callback *callback = new Nan::Callback(Local::Cast(info[1])); + ConvenientFromDiffWorker *worker = new ConvenientFromDiffWorker(baton, callback); + + worker->SaveToPersistent("diff", info[0]); + + Nan::AsyncQueueWorker(worker); + return; +} + +void GitPatch::ConvenientFromDiffWorker::Execute() { + giterr_clear(); + + { + LockMaster lockMaster(true, baton->diff); + std::vector patchesToBeFreed; + + for (int i = 0; i < git_diff_num_deltas(baton->diff); ++i) { + git_patch *nextPatch; + int result = git_patch_from_diff(&nextPatch, baton->diff, i); + + if (result) { + while (!patchesToBeFreed.empty()) + { + git_patch_free(patchesToBeFreed.back()); + 
patchesToBeFreed.pop_back(); + } + + while (!baton->out->empty()) { + PatchDataFree(baton->out->back()); + baton->out->pop_back(); + } + + baton->error_code = result; + + if (giterr_last() != NULL) { + baton->error = git_error_dup(giterr_last()); + } + + delete baton->out; + baton->out = NULL; + + return; + } + + if (nextPatch != NULL) { + baton->out->push_back(createFromRaw(nextPatch)); + patchesToBeFreed.push_back(nextPatch); + } + } + + while (!patchesToBeFreed.empty()) + { + git_patch_free(patchesToBeFreed.back()); + patchesToBeFreed.pop_back(); + } + } +} + +void GitPatch::ConvenientFromDiffWorker::HandleOKCallback() { + if (baton->out != NULL) { + unsigned int size = baton->out->size(); + Local result = Nan::New(size); + + for (unsigned int i = 0; i < size; ++i) { + Nan::Set(result, Nan::New(i), ConvenientPatch::New((void *)baton->out->at(i))); + } + + delete baton->out; + + Local argv[2] = { + Nan::Null(), + result + }; + callback->Call(2, argv); + + return; + } + + if (baton->error) { + Local argv[1] = { + Nan::Error(baton->error->message) + }; + callback->Call(1, argv); + if (baton->error->message) + { + free((void *)baton->error->message); + } + + free((void *)baton->error); + + return; + } + + if (baton->error_code < 0) { + Local err = Nan::Error("method convenientFromDiff has thrown an error.")->ToObject(); + err->Set(Nan::New("errno").ToLocalChecked(), Nan::New(baton->error_code)); + Local argv[1] = { + err + }; + callback->Call(1, argv); + + return; + } + + callback->Call(0, NULL); +} diff --git a/generate/templates/manual/revwalk/fast_walk.cc b/generate/templates/manual/revwalk/fast_walk.cc new file mode 100644 index 000000000..2f263f83f --- /dev/null +++ b/generate/templates/manual/revwalk/fast_walk.cc @@ -0,0 +1,166 @@ +NAN_METHOD(GitRevwalk::FastWalk) +{ + if (info.Length() == 0 || !info[0]->IsNumber()) { + return Nan::ThrowError("Max count is required and must be a number."); + } + + if (info.Length() == 1 || !info[1]->IsFunction()) { + return 
Nan::ThrowError("Callback is required and must be a Function."); + } + + FastWalkBaton* baton = new FastWalkBaton; + + baton->error_code = GIT_OK; + baton->error = NULL; + baton->max_count = (unsigned int)info[0]->ToNumber()->Value(); + baton->out = new std::vector; + baton->out->reserve(baton->max_count); + baton->walk = Nan::ObjectWrap::Unwrap(info.This())->GetValue(); + + Nan::Callback *callback = new Nan::Callback(Local::Cast(info[1])); + FastWalkWorker *worker = new FastWalkWorker(baton, callback); + worker->SaveToPersistent("fastWalk", info.This()); + + Nan::AsyncQueueWorker(worker); + return; +} + +void GitRevwalk::FastWalkWorker::Execute() +{ + for (int i = 0; i < baton->max_count; i++) + { + git_oid *nextCommit = (git_oid *)malloc(sizeof(git_oid)); + giterr_clear(); + baton->error_code = git_revwalk_next(nextCommit, baton->walk); + + if (baton->error_code != GIT_OK) + { + // We couldn't get a commit out of the revwalk. It's either in + // an error state or there aren't anymore commits in the revwalk. 
+ free(nextCommit); + + if (baton->error_code != GIT_ITEROVER) { + baton->error = git_error_dup(giterr_last()); + + while(!baton->out->empty()) + { + // part of me wants to #define shoot free so we can take the + // baton out back and shoot the oids + git_oid *oidToFree = baton->out->back(); + free(oidToFree); + baton->out->pop_back(); + } + + delete baton->out; + + baton->out = NULL; + } + else { + baton->error_code = GIT_OK; + } + + break; + } + + baton->out->push_back(nextCommit); + } +} + +void GitRevwalk::FastWalkWorker::HandleOKCallback() +{ + if (baton->out != NULL) + { + unsigned int size = baton->out->size(); + Local result = Nan::New(size); + for (unsigned int i = 0; i < size; i++) { + Nan::Set(result, Nan::New(i), GitOid::New(baton->out->at(i), true)); + } + + delete baton->out; + + Local argv[2] = { + Nan::Null(), + result + }; + callback->Call(2, argv); + } + else + { + if (baton->error) + { + Local argv[1] = { + Nan::Error(baton->error->message) + }; + callback->Call(1, argv); + if (baton->error->message) + { + free((void *)baton->error->message); + } + + free((void *)baton->error); + } + else if (baton->error_code < 0) + { + std::queue< Local > workerArguments; + bool callbackFired = false; + + while(!workerArguments.empty()) + { + Local node = workerArguments.front(); + workerArguments.pop(); + + if ( + !node->IsObject() + || node->IsArray() + || node->IsBooleanObject() + || node->IsDate() + || node->IsFunction() + || node->IsNumberObject() + || node->IsRegExp() + || node->IsStringObject() + ) + { + continue; + } + + Local nodeObj = node->ToObject(); + Local checkValue = GetPrivate(nodeObj, Nan::New("NodeGitPromiseError").ToLocalChecked()); + + if (!checkValue.IsEmpty() && !checkValue->IsNull() && !checkValue->IsUndefined()) + { + Local argv[1] = { + checkValue->ToObject() + }; + callback->Call(1, argv); + callbackFired = true; + break; + } + + Local properties = nodeObj->GetPropertyNames(); + for (unsigned int propIndex = 0; propIndex < 
properties->Length(); ++propIndex) + { + Local propName = properties->Get(propIndex)->ToString(); + Local nodeToQueue = nodeObj->Get(propName); + if (!nodeToQueue->IsUndefined()) + { + workerArguments.push(nodeToQueue); + } + } + } + + if (!callbackFired) + { + Local err = Nan::Error("Method next has thrown an error.")->ToObject(); + err->Set(Nan::New("errno").ToLocalChecked(), Nan::New(baton->error_code)); + Local argv[1] = { + err + }; + callback->Call(1, argv); + } + } + else + { + callback->Call(0, NULL); + } + } +} diff --git a/generate/templates/manual/revwalk/file_history_walk.cc b/generate/templates/manual/revwalk/file_history_walk.cc new file mode 100644 index 000000000..0f511969f --- /dev/null +++ b/generate/templates/manual/revwalk/file_history_walk.cc @@ -0,0 +1,316 @@ +NAN_METHOD(GitRevwalk::FileHistoryWalk) +{ + if (info.Length() == 0 || !info[0]->IsString()) { + return Nan::ThrowError("File path to get the history is required."); + } + + if (info.Length() == 1 || !info[1]->IsNumber()) { + return Nan::ThrowError("Max count is required and must be a number."); + } + + if (info.Length() == 2 || !info[2]->IsFunction()) { + return Nan::ThrowError("Callback is required and must be a Function."); + } + + FileHistoryWalkBaton* baton = new FileHistoryWalkBaton; + + baton->error_code = GIT_OK; + baton->error = NULL; + String::Utf8Value from_js_file_path(info[0]->ToString()); + baton->file_path = strdup(*from_js_file_path); + baton->max_count = (unsigned int)info[1]->ToNumber()->Value(); + baton->out = new std::vector< std::pair > *>; + baton->out->reserve(baton->max_count); + baton->walk = Nan::ObjectWrap::Unwrap(info.This())->GetValue(); + + Nan::Callback *callback = new Nan::Callback(Local::Cast(info[2])); + FileHistoryWalkWorker *worker = new FileHistoryWalkWorker(baton, callback); + worker->SaveToPersistent("fileHistoryWalk", info.This()); + + Nan::AsyncQueueWorker(worker); + return; +} + +void GitRevwalk::FileHistoryWalkWorker::Execute() +{ + 
git_repository *repo = git_revwalk_repository(baton->walk); + git_oid *nextOid = (git_oid *)malloc(sizeof(git_oid)); + giterr_clear(); + for ( + unsigned int i = 0; + i < baton->max_count && (baton->error_code = git_revwalk_next(nextOid, baton->walk)) == GIT_OK; + ++i + ) { + // check if this commit has the file + git_commit *nextCommit; + + if ((baton->error_code = git_commit_lookup(&nextCommit, repo, nextOid)) != GIT_OK) { + break; + } + + git_tree *thisTree, *parentTree; + if ((baton->error_code = git_commit_tree(&thisTree, nextCommit)) != GIT_OK) { + git_commit_free(nextCommit); + break; + } + + git_diff *diffs; + git_diff_options opts = GIT_DIFF_OPTIONS_INIT; + char *file_path = strdup(baton->file_path); + opts.pathspec.strings = &file_path; + opts.pathspec.count = 1; + git_commit *parent; + unsigned int parents = git_commit_parentcount(nextCommit); + if (parents > 1) { + git_commit_free(nextCommit); + continue; + } else if (parents == 1) { + if ((baton->error_code = git_commit_parent(&parent, nextCommit, 0)) != GIT_OK) { + git_commit_free(nextCommit); + break; + } + if ( + (baton->error_code = git_commit_tree(&parentTree, parent)) != GIT_OK || + (baton->error_code = git_diff_tree_to_tree(&diffs, repo, parentTree, thisTree, &opts)) != GIT_OK + ) { + git_commit_free(nextCommit); + git_commit_free(parent); + break; + } + } else { + if ((baton->error_code = git_diff_tree_to_tree(&diffs, repo, NULL, thisTree, &opts)) != GIT_OK) { + git_commit_free(nextCommit); + break; + } + } + + free(file_path); + opts.pathspec.strings = NULL; + opts.pathspec.count = 0; + + bool flag = false; + bool doRenamedPass = false; + unsigned int numDeltas = git_diff_num_deltas(diffs); + for (unsigned int j = 0; j < numDeltas; ++j) { + git_patch *nextPatch; + baton->error_code = git_patch_from_diff(&nextPatch, diffs, j); + + if (baton->error_code < GIT_OK) { + break; + } + + if (nextPatch == NULL) { + continue; + } + + const git_diff_delta *delta = git_patch_get_delta(nextPatch); + bool 
isEqualOldFile = !strcmp(delta->old_file.path, baton->file_path); + bool isEqualNewFile = !strcmp(delta->new_file.path, baton->file_path); + + if (isEqualNewFile) { + if (delta->status == GIT_DELTA_ADDED || delta->status == GIT_DELTA_DELETED) { + doRenamedPass = true; + break; + } + std::pair > *historyEntry; + if (!isEqualOldFile) { + historyEntry = new std::pair >( + nextCommit, + std::pair(strdup(delta->old_file.path), delta->status) + ); + } else { + historyEntry = new std::pair >( + nextCommit, + std::pair(strdup(delta->new_file.path), delta->status) + ); + } + baton->out->push_back(historyEntry); + flag = true; + } + + git_patch_free(nextPatch); + + if (flag) { + break; + } + } + + if (doRenamedPass) { + git_diff_free(diffs); + + if (parents == 1) { + if ((baton->error_code = git_diff_tree_to_tree(&diffs, repo, parentTree, thisTree, NULL)) != GIT_OK) { + git_commit_free(nextCommit); + break; + } + if ((baton->error_code = git_diff_find_similar(diffs, NULL)) != GIT_OK) { + git_commit_free(nextCommit); + break; + } + } else { + if ((baton->error_code = git_diff_tree_to_tree(&diffs, repo, NULL, thisTree, NULL)) != GIT_OK) { + git_commit_free(nextCommit); + break; + } + if((baton->error_code = git_diff_find_similar(diffs, NULL)) != GIT_OK) { + git_commit_free(nextCommit); + break; + } + } + + flag = false; + numDeltas = git_diff_num_deltas(diffs); + for (unsigned int j = 0; j < numDeltas; ++j) { + git_patch *nextPatch; + baton->error_code = git_patch_from_diff(&nextPatch, diffs, j); + + if (baton->error_code < GIT_OK) { + break; + } + + if (nextPatch == NULL) { + continue; + } + + const git_diff_delta *delta = git_patch_get_delta(nextPatch); + bool isEqualOldFile = !strcmp(delta->old_file.path, baton->file_path); + bool isEqualNewFile = !strcmp(delta->new_file.path, baton->file_path); + int oldLen = strlen(delta->old_file.path); + int newLen = strlen(delta->new_file.path); + char *outPair = new char[oldLen + newLen + 2]; + strcpy(outPair, delta->new_file.path); + 
outPair[newLen] = '\n'; + outPair[newLen + 1] = '\0'; + strcat(outPair, delta->old_file.path); + + if (isEqualNewFile) { + std::pair > *historyEntry; + if (!isEqualOldFile) { + historyEntry = new std::pair >( + nextCommit, + std::pair(strdup(outPair), delta->status) + ); + } else { + historyEntry = new std::pair >( + nextCommit, + std::pair(strdup(delta->new_file.path), delta->status) + ); + } + baton->out->push_back(historyEntry); + flag = true; + } else if (isEqualOldFile) { + std::pair > *historyEntry; + historyEntry = new std::pair >( + nextCommit, + std::pair(strdup(outPair), delta->status) + ); + baton->out->push_back(historyEntry); + flag = true; + } + + delete[] outPair; + + git_patch_free(nextPatch); + + if (flag) { + break; + } + } + } + + git_diff_free(diffs); + + if (!flag && nextCommit != NULL) { + git_commit_free(nextCommit); + } + + if (baton->error_code != GIT_OK) { + break; + } + } + + free(nextOid); + + if (baton->error_code != GIT_OK) { + if (baton->error_code != GIT_ITEROVER) { + baton->error = git_error_dup(giterr_last()); + + while(!baton->out->empty()) + { + std::pair > *pairToFree = baton->out->back(); + baton->out->pop_back(); + git_commit_free(pairToFree->first); + free(pairToFree->second.first); + free(pairToFree); + } + + delete baton->out; + + baton->out = NULL; + } + } else { + baton->error_code = GIT_OK; + } +} + +void GitRevwalk::FileHistoryWalkWorker::HandleOKCallback() +{ + if (baton->out != NULL) { + unsigned int size = baton->out->size(); + Local result = Nan::New(size); + for (unsigned int i = 0; i < size; i++) { + Local historyEntry = Nan::New(); + std::pair > *batonResult = baton->out->at(i); + Nan::Set(historyEntry, Nan::New("commit").ToLocalChecked(), GitCommit::New(batonResult->first, true)); + Nan::Set(historyEntry, Nan::New("status").ToLocalChecked(), Nan::New(batonResult->second.second)); + if (batonResult->second.second == GIT_DELTA_RENAMED) { + char *namePair = batonResult->second.first; + char *split = 
strchr(namePair, '\n'); + *split = '\0'; + char *oldName = split + 1; + + Nan::Set(historyEntry, Nan::New("oldName").ToLocalChecked(), Nan::New(oldName).ToLocalChecked()); + Nan::Set(historyEntry, Nan::New("newName").ToLocalChecked(), Nan::New(namePair).ToLocalChecked()); + } + Nan::Set(result, Nan::New(i), historyEntry); + + free(batonResult->second.first); + free(batonResult); + } + + Local argv[2] = { + Nan::Null(), + result + }; + callback->Call(2, argv); + + delete baton->out; + return; + } + + if (baton->error) { + Local argv[1] = { + Nan::Error(baton->error->message) + }; + callback->Call(1, argv); + if (baton->error->message) + { + free((void *)baton->error->message); + } + + free((void *)baton->error); + return; + } + + if (baton->error_code < 0) { + Local err = Nan::Error("Method next has thrown an error.")->ToObject(); + err->Set(Nan::New("errno").ToLocalChecked(), Nan::New(baton->error_code)); + Local argv[1] = { + err + }; + callback->Call(1, argv); + return; + } + + callback->Call(0, NULL); +} diff --git a/generate/templates/manual/src/convenient_hunk.cc b/generate/templates/manual/src/convenient_hunk.cc new file mode 100644 index 000000000..845ba4098 --- /dev/null +++ b/generate/templates/manual/src/convenient_hunk.cc @@ -0,0 +1,185 @@ +#include +#include + +extern "C" { + #include +} + +#include "../include/functions/copy.h" +#include "../include/convenient_hunk.h" +#include "../include/diff_line.h" + +using namespace std; +using namespace v8; +using namespace node; + +void HunkDataFree(HunkData *hunk) { + while (!hunk->lines->empty()) { + git_diff_line *line = hunk->lines->back(); + hunk->lines->pop_back(); + free((void *)line->content); + free((void *)line); + } + delete hunk->lines; + delete hunk; +} + +ConvenientHunk::ConvenientHunk(HunkData *raw) { + this->hunk = raw; +} + +ConvenientHunk::~ConvenientHunk() { + HunkDataFree(this->hunk); +} + +void ConvenientHunk::InitializeComponent(Local target) { + Nan::HandleScope scope; + + Local tpl = 
Nan::New(JSNewFunction); + + tpl->InstanceTemplate()->SetInternalFieldCount(1); + tpl->SetClassName(Nan::New("ConvenientHunk").ToLocalChecked()); + + Nan::SetPrototypeMethod(tpl, "size", Size); + Nan::SetPrototypeMethod(tpl, "lines", Lines); + + Nan::SetPrototypeMethod(tpl, "oldStart", OldStart); + Nan::SetPrototypeMethod(tpl, "oldLines", OldLines); + Nan::SetPrototypeMethod(tpl, "newStart", NewStart); + Nan::SetPrototypeMethod(tpl, "newLines", NewLines); + Nan::SetPrototypeMethod(tpl, "headerLen", HeaderLen); + Nan::SetPrototypeMethod(tpl, "header", Header); + + Local _constructor_template = Nan::GetFunction(tpl).ToLocalChecked(); + constructor_template.Reset(_constructor_template); + Nan::Set(target, Nan::New("ConvenientHunk").ToLocalChecked(), _constructor_template); +} + +NAN_METHOD(ConvenientHunk::JSNewFunction) { + + if (info.Length() == 0 || !info[0]->IsExternal()) { + return Nan::ThrowError("A new ConvenientHunk cannot be instantiated."); + } + + ConvenientHunk* object = new ConvenientHunk(static_cast(Local::Cast(info[0])->Value())); + object->Wrap(info.This()); + + info.GetReturnValue().Set(info.This()); +} + +Local ConvenientHunk::New(void *raw) { + Nan::EscapableHandleScope scope; + Local argv[1] = { Nan::New((void *)raw) }; + return scope.Escape(Nan::NewInstance(Nan::New(ConvenientHunk::constructor_template), 1, argv).ToLocalChecked()); +} + +HunkData *ConvenientHunk::GetValue() { + return this->hunk; +} + +size_t ConvenientHunk::GetSize() { + return this->hunk->numLines; +} + +NAN_METHOD(ConvenientHunk::Size) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetSize()); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientHunk::Lines) { + if (info.Length() == 0 || !info[0]->IsFunction()) { + return Nan::ThrowError("Callback is required and must be a Function."); + } + + LinesBaton *baton = new LinesBaton; + + baton->hunk = Nan::ObjectWrap::Unwrap(info.This())->GetValue(); + + Nan::Callback *callback = new 
Nan::Callback(Local::Cast(info[0])); + LinesWorker *worker = new LinesWorker(baton, callback); + + worker->SaveToPersistent("hunk", info.This()); + + Nan::AsyncQueueWorker(worker); + return; +} + +void ConvenientHunk::LinesWorker::Execute() { + baton->lines = new std::vector; + baton->lines->reserve(baton->hunk->numLines); + for (unsigned int i = 0; i < baton->hunk->numLines; ++i) { + git_diff_line *storeLine = (git_diff_line *)malloc(sizeof(git_diff_line)); + storeLine->origin = baton->hunk->lines->at(i)->origin; + storeLine->old_lineno = baton->hunk->lines->at(i)->old_lineno; + storeLine->new_lineno = baton->hunk->lines->at(i)->new_lineno; + storeLine->num_lines = baton->hunk->lines->at(i)->num_lines; + storeLine->content_len = baton->hunk->lines->at(i)->content_len; + storeLine->content_offset = baton->hunk->lines->at(i)->content_offset; + storeLine->content = strdup(baton->hunk->lines->at(i)->content); + baton->lines->push_back(storeLine); + } +} + +void ConvenientHunk::LinesWorker::HandleOKCallback() { + unsigned int size = baton->lines->size(); + Local result = Nan::New(size); + + for(unsigned int i = 0; i < size; ++i) { + Nan::Set(result, Nan::New(i), GitDiffLine::New(baton->lines->at(i), true)); + } + + delete baton->lines; + + Local argv[2] = { + Nan::Null(), + result + }; + callback->Call(2, argv); +} + +NAN_METHOD(ConvenientHunk::OldStart) { + Local to; + int old_start = Nan::ObjectWrap::Unwrap(info.This())->GetValue()->hunk.old_start; + info.GetReturnValue().Set(Nan::New(old_start)); +} + + +NAN_METHOD(ConvenientHunk::OldLines) { + Local to; + int old_lines = Nan::ObjectWrap::Unwrap(info.This())->GetValue()->hunk.old_lines; + info.GetReturnValue().Set(Nan::New(old_lines)); +} + +NAN_METHOD(ConvenientHunk::NewStart) { + Local to; + int new_start = Nan::ObjectWrap::Unwrap(info.This())->GetValue()->hunk.new_start; + info.GetReturnValue().Set(Nan::New(new_start)); +} + +NAN_METHOD(ConvenientHunk::NewLines) { + Local to; + int new_lines = 
Nan::ObjectWrap::Unwrap(info.This())->GetValue()->hunk.new_lines; + info.GetReturnValue().Set(Nan::New(new_lines)); +} + +NAN_METHOD(ConvenientHunk::HeaderLen) { + Local to; + size_t header_len = Nan::ObjectWrap::Unwrap(info.This())->GetValue()->hunk.header_len; + info.GetReturnValue().Set(Nan::New(header_len)); +} + +NAN_METHOD(ConvenientHunk::Header) { + Local to; + + char *header = Nan::ObjectWrap::Unwrap(info.This())->GetValue()->hunk.header; + if (header) { + to = Nan::New(header).ToLocalChecked(); + } else { + to = Nan::Null(); + } + + info.GetReturnValue().Set(to); +} + +Nan::Persistent ConvenientHunk::constructor_template; diff --git a/generate/templates/manual/src/convenient_patch.cc b/generate/templates/manual/src/convenient_patch.cc new file mode 100644 index 000000000..dbe75ba74 --- /dev/null +++ b/generate/templates/manual/src/convenient_patch.cc @@ -0,0 +1,400 @@ +#include +#include + +extern "C" { + #include +} + +#include "../include/convenient_hunk.h" +#include "../include/convenient_patch.h" +#include "../include/functions/copy.h" +#include "../include/diff_file.h" + +using namespace std; +using namespace v8; +using namespace node; + +void PatchDataFree(PatchData *patch) { + free((void *)patch->old_file.path); + free((void *)patch->new_file.path); + while(!patch->hunks->empty()) { + HunkData *hunk = patch->hunks->back(); + patch->hunks->pop_back(); + while (!hunk->lines->empty()) { + git_diff_line *line = hunk->lines->back(); + hunk->lines->pop_back(); + free((void *)line->content); + free((void *)line); + } + delete hunk; + } + delete patch; +} + +PatchData *createFromRaw(git_patch *raw) { + PatchData *patch = new PatchData; + const git_diff_delta *delta = git_patch_get_delta(raw); + + patch->status = delta->status; + + patch->old_file = delta->old_file; + patch->old_file.path = strdup(delta->old_file.path); + + patch->new_file = delta->new_file; + patch->new_file.path = strdup(delta->new_file.path); + + git_patch_line_stats( + 
&patch->lineStats.context, + &patch->lineStats.additions, + &patch->lineStats.deletions, + raw + ); + + patch->numHunks = git_patch_num_hunks(raw); + patch->hunks = new std::vector; + patch->hunks->reserve(patch->numHunks); + + for (unsigned int i = 0; i < patch->numHunks; ++i) { + HunkData *hunkData = new HunkData; + const git_diff_hunk *hunk = NULL; + int result = git_patch_get_hunk(&hunk, &hunkData->numLines, raw, i); + if (result != 0) { + continue; + } + + hunkData->hunk.old_start = hunk->old_start; + hunkData->hunk.old_lines = hunk->old_lines; + hunkData->hunk.new_start = hunk->new_start; + hunkData->hunk.new_lines = hunk->new_lines; + hunkData->hunk.header_len = hunk->header_len; + memcpy(&hunkData->hunk.header, &hunk->header, 128); + + hunkData->lines = new std::vector; + hunkData->lines->reserve(hunkData->numLines); + + static const int noNewlineStringLength = 29; + bool EOFFlag = false; + for (unsigned int j = 0; j < hunkData->numLines; ++j) { + git_diff_line *storeLine = (git_diff_line *)malloc(sizeof(git_diff_line)); + const git_diff_line *line = NULL; + int result = git_patch_get_line_in_hunk(&line, raw, i, j); + if (result != 0) { + continue; + } + + if (j == 0) { + int calculatedContentLength = line->content_len; + if ( + calculatedContentLength > noNewlineStringLength && + !strncmp( + &line->content[calculatedContentLength - noNewlineStringLength], + "\n\\ No newline at end of file\n", (std::min)(calculatedContentLength, noNewlineStringLength) + )) { + EOFFlag = true; + } + } + + storeLine->origin = line->origin; + storeLine->old_lineno = line->old_lineno; + storeLine->new_lineno = line->new_lineno; + storeLine->num_lines = line->num_lines; + storeLine->content_len = line->content_len; + storeLine->content_offset = line->content_offset; + char * transferContent; + if (EOFFlag) { + transferContent = (char *)malloc(storeLine->content_len + noNewlineStringLength + 1); + memcpy(transferContent, line->content, storeLine->content_len); + 
memcpy(transferContent + storeLine->content_len, "\n\\ No newline at end of file\n", noNewlineStringLength); + transferContent[storeLine->content_len + noNewlineStringLength] = '\0'; + } else { + transferContent = (char *)malloc(storeLine->content_len + 1); + memcpy(transferContent, line->content, storeLine->content_len); + transferContent[storeLine->content_len] = '\0'; + } + storeLine->content = strdup(transferContent); + free((void *)transferContent); + hunkData->lines->push_back(storeLine); + } + patch->hunks->push_back(hunkData); + } + + return patch; +} + +ConvenientPatch::ConvenientPatch(PatchData *raw) { + this->patch = raw; +} + +ConvenientPatch::~ConvenientPatch() { + PatchDataFree(this->patch); +} + +void ConvenientPatch::InitializeComponent(Local target) { + Nan::HandleScope scope; + + Local tpl = Nan::New(JSNewFunction); + + tpl->InstanceTemplate()->SetInternalFieldCount(1); + tpl->SetClassName(Nan::New("ConvenientPatch").ToLocalChecked()); + + Nan::SetPrototypeMethod(tpl, "hunks", Hunks); + Nan::SetPrototypeMethod(tpl, "lineStats", LineStats); + Nan::SetPrototypeMethod(tpl, "size", Size); + + Nan::SetPrototypeMethod(tpl, "oldFile", OldFile); + Nan::SetPrototypeMethod(tpl, "newFile", NewFile); + Nan::SetPrototypeMethod(tpl, "status", Status); + Nan::SetPrototypeMethod(tpl, "isUnmodified", IsUnmodified); + Nan::SetPrototypeMethod(tpl, "isAdded", IsAdded); + Nan::SetPrototypeMethod(tpl, "isDeleted", IsDeleted); + Nan::SetPrototypeMethod(tpl, "isModified", IsModified); + Nan::SetPrototypeMethod(tpl, "isRenamed", IsRenamed); + Nan::SetPrototypeMethod(tpl, "isCopied", IsCopied); + Nan::SetPrototypeMethod(tpl, "isIgnored", IsIgnored); + Nan::SetPrototypeMethod(tpl, "isUntracked", IsUntracked); + Nan::SetPrototypeMethod(tpl, "isTypeChange", IsTypeChange); + Nan::SetPrototypeMethod(tpl, "isUnreadable", IsUnreadable); + Nan::SetPrototypeMethod(tpl, "isConflicted", IsConflicted); + + Local _constructor_template = Nan::GetFunction(tpl).ToLocalChecked(); + 
constructor_template.Reset(_constructor_template); + Nan::Set(target, Nan::New("ConvenientPatch").ToLocalChecked(), _constructor_template); +} + +NAN_METHOD(ConvenientPatch::JSNewFunction) { + + if (info.Length() == 0 || !info[0]->IsExternal()) { + return Nan::ThrowError("A new ConvenientPatch cannot be instantiated."); + } + + ConvenientPatch* object = new ConvenientPatch(static_cast(Local::Cast(info[0])->Value())); + object->Wrap(info.This()); + + info.GetReturnValue().Set(info.This()); +} + +Local ConvenientPatch::New(void *raw) { + Nan::EscapableHandleScope scope; + Local argv[1] = { Nan::New((void *)raw) }; + return scope.Escape(Nan::NewInstance(Nan::New(ConvenientPatch::constructor_template), 1, argv).ToLocalChecked()); +} + +ConvenientLineStats ConvenientPatch::GetLineStats() { + return this->patch->lineStats; +} + +git_delta_t ConvenientPatch::GetStatus() { + return this->patch->status; +} + +git_diff_file ConvenientPatch::GetOldFile() { + return this->patch->old_file; +} + +git_diff_file ConvenientPatch::GetNewFile() { + return this->patch->new_file; +} + +size_t ConvenientPatch::GetNumHunks() { + return this->patch->numHunks; +} + +PatchData *ConvenientPatch::GetValue() { + return this->patch; +} + +NAN_METHOD(ConvenientPatch::Hunks) { + if (info.Length() == 0 || !info[0]->IsFunction()) { + return Nan::ThrowError("Callback is required and must be a Function."); + } + + HunksBaton *baton = new HunksBaton; + + baton->patch = Nan::ObjectWrap::Unwrap(info.This())->GetValue(); + + Nan::Callback *callback = new Nan::Callback(Local::Cast(info[0])); + HunksWorker *worker = new HunksWorker(baton, callback); + + worker->SaveToPersistent("patch", info.This()); + + Nan::AsyncQueueWorker(worker); + return; +} + +void ConvenientPatch::HunksWorker::Execute() { + // copy hunks + baton->hunks = new std::vector; + baton->hunks->reserve(baton->patch->numHunks); + + for (unsigned int i = 0; i < baton->patch->numHunks; ++i) { + HunkData *hunkData = new HunkData; + 
hunkData->numLines = baton->patch->hunks->at(i)->numLines; + hunkData->hunk.old_start = baton->patch->hunks->at(i)->hunk.old_start; + hunkData->hunk.old_lines = baton->patch->hunks->at(i)->hunk.old_lines; + hunkData->hunk.new_start = baton->patch->hunks->at(i)->hunk.new_start; + hunkData->hunk.new_lines = baton->patch->hunks->at(i)->hunk.new_lines; + hunkData->hunk.header_len = baton->patch->hunks->at(i)->hunk.header_len; + memcpy(&hunkData->hunk.header, &baton->patch->hunks->at(i)->hunk.header, 128); + + hunkData->lines = new std::vector; + hunkData->lines->reserve(hunkData->numLines); + + for (unsigned int j = 0; j < hunkData->numLines; ++j) { + git_diff_line *storeLine = (git_diff_line *)malloc(sizeof(git_diff_line)); + storeLine->origin = baton->patch->hunks->at(i)->lines->at(j)->origin; + storeLine->old_lineno = baton->patch->hunks->at(i)->lines->at(j)->old_lineno; + storeLine->new_lineno = baton->patch->hunks->at(i)->lines->at(j)->new_lineno; + storeLine->num_lines = baton->patch->hunks->at(i)->lines->at(j)->num_lines; + storeLine->content_len = baton->patch->hunks->at(i)->lines->at(j)->content_len; + storeLine->content_offset = baton->patch->hunks->at(i)->lines->at(j)->content_offset; + storeLine->content = strdup(baton->patch->hunks->at(i)->lines->at(j)->content); + hunkData->lines->push_back(storeLine); + } + baton->hunks->push_back(hunkData); + } +} + +void ConvenientPatch::HunksWorker::HandleOKCallback() { + unsigned int size = baton->hunks->size(); + Local result = Nan::New(size); + + for(unsigned int i = 0; i < size; ++i) { + Nan::Set(result, Nan::New(i), ConvenientHunk::New(baton->hunks->at(i))); + } + + delete baton->hunks; + + Local argv[2] = { + Nan::Null(), + result + }; + callback->Call(2, argv); +} + +NAN_METHOD(ConvenientPatch::LineStats) { + Nan::EscapableHandleScope scope; + + Local to; + Local toReturn = Nan::New(); + ConvenientLineStats stats = Nan::ObjectWrap::Unwrap(info.This())->GetLineStats(); + + to = Nan::New(stats.context); + 
Nan::Set(toReturn, Nan::New("total_context").ToLocalChecked(), to); + to = Nan::New(stats.additions); + Nan::Set(toReturn, Nan::New("total_additions").ToLocalChecked(), to); + to = Nan::New(stats.deletions); + Nan::Set(toReturn, Nan::New("total_deletions").ToLocalChecked(), to); + + return info.GetReturnValue().Set(scope.Escape(toReturn)); +} + +NAN_METHOD(ConvenientPatch::Size) { + Local to; + + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetNumHunks()); + + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::OldFile) { + Nan::EscapableHandleScope scope; + + Local to; + git_diff_file *old_file = (git_diff_file *)malloc(sizeof(git_diff_file)); + *old_file = Nan::ObjectWrap::Unwrap(info.This())->GetOldFile(); + + to = GitDiffFile::New(old_file, true); + + return info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::NewFile) { + Nan::EscapableHandleScope scope; + + Local to; + git_diff_file *new_file = (git_diff_file *)malloc(sizeof(git_diff_file)); + *new_file = Nan::ObjectWrap::Unwrap(info.This())->GetNewFile(); + if (new_file != NULL) { + to = GitDiffFile::New(new_file, true); + } else { + to = Nan::Null(); + } + + return info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::Status) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus()); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsUnmodified) { + Nan::EscapableHandleScope scope; + + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_UNMODIFIED); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsAdded) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_ADDED); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsDeleted) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_DELETED); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsModified) { + Local 
to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_MODIFIED); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsRenamed) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_RENAMED); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsCopied) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_COPIED); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsIgnored) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_IGNORED); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsUntracked) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_UNTRACKED); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsTypeChange) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_TYPECHANGE); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsUnreadable) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_UNREADABLE); + info.GetReturnValue().Set(to); +} + +NAN_METHOD(ConvenientPatch::IsConflicted) { + Local to; + to = Nan::New(Nan::ObjectWrap::Unwrap(info.This())->GetStatus() == GIT_DELTA_CONFLICTED); + info.GetReturnValue().Set(to); +} + +Nan::Persistent ConvenientPatch::constructor_template; diff --git a/generate/templates/manual/src/functions/copy.cc b/generate/templates/manual/src/functions/copy.cc new file mode 100644 index 000000000..f09f2cbaa --- /dev/null +++ b/generate/templates/manual/src/functions/copy.cc @@ -0,0 +1,22 @@ +#include +#include + +#include "git2.h" +#include "git2/diff.h" + +const git_error *git_error_dup(const git_error *arg) { + git_error *result = (git_error *)malloc(sizeof(git_error)); + result->klass = arg->klass; + result->message = strdup(arg->message); + return result; +} + 
+void git_time_dup(git_time **out, const git_time *arg) { + *out = (git_time *)malloc(sizeof(git_time)); + memcpy(*out, arg, sizeof(git_time)); +} + +void git_transfer_progress_dup(git_transfer_progress **out, const git_transfer_progress *arg) { + *out = (git_transfer_progress *)malloc(sizeof(git_transfer_progress)); + memcpy(*out, arg, sizeof(git_transfer_progress)); +} diff --git a/generate/templates/manual/src/functions/sleep_for_ms.cc b/generate/templates/manual/src/functions/sleep_for_ms.cc new file mode 100644 index 000000000..11b6a72f6 --- /dev/null +++ b/generate/templates/manual/src/functions/sleep_for_ms.cc @@ -0,0 +1,16 @@ +#ifdef WIN32 +#include +#else +#include +#endif // win32 + +void sleep_for_ms(int milliseconds) { + #ifdef WIN32 + Sleep(milliseconds); + #else + struct timespec t; + t.tv_sec = 0; + t.tv_nsec = milliseconds * 1000000; // 1 milliseconds == 1,000,000 nanoseconds + nanosleep(&t, NULL); + #endif +} diff --git a/generate/templates/manual/src/git_buf_converter.cc b/generate/templates/manual/src/git_buf_converter.cc new file mode 100644 index 000000000..371f14336 --- /dev/null +++ b/generate/templates/manual/src/git_buf_converter.cc @@ -0,0 +1,30 @@ +#include +#include +#include +#include + +#include "../include/git_buf_converter.h" +#include "git2/buffer.h" + +using namespace v8; +using namespace node; + +git_buf *StrArrayConverter::Convert(Local val) { + if (val->IsString() || val->IsStringObject()) { + string v8String = ConvertString(val->ToString()); + const size_t size = sizeof(git_buf); + uint8_t* memory = reinterpret_cast(malloc(size)); + git_buf *result = reinterpret_cast(memory); + size_t stringLength = v8String.length() - 1; + + memory = reinterpret_cast(malloc(stringLength)); + + memcpy(memory, v8String.c_str(), stringLength); + + result->size = stringLength; + result->ptr = reinterpret_cast(memory); + return result; + } else { + return NULL; + } +} diff --git a/generate/templates/manual/src/init_ssh2.cc 
b/generate/templates/manual/src/init_ssh2.cc new file mode 100644 index 000000000..a8e2543c2 --- /dev/null +++ b/generate/templates/manual/src/init_ssh2.cc @@ -0,0 +1,12 @@ +// We are initializing libssh2 from a separate .cc file to avoid ssize_t +// redefinition conflicts caused by incliding both node.h and libssh2.h from +// the same file (e.g. nodegit.cc) +// +// The redefinition can also be avoided by #defines but that is risky in case +// the libraries depend on the different definitions. + +#include + +void init_ssh2() { + libssh2_init(0); +} diff --git a/generate/templates/manual/src/lock_master.cc b/generate/templates/manual/src/lock_master.cc new file mode 100644 index 000000000..30679b534 --- /dev/null +++ b/generate/templates/manual/src/lock_master.cc @@ -0,0 +1,246 @@ +#include +#include +#include +#include +#include +#include +#include + +#include "../include/lock_master.h" + +// information about a lockable object +// - the mutex used to lock it and the number of outstanding locks +struct ObjectInfo { + uv_mutex_t *mutex; + unsigned useCount; + + ObjectInfo(uv_mutex_t *mutex, unsigned useCount) + : mutex(mutex), useCount(useCount) + {} +}; + +// LockMaster implementation details +// implemented in a separate class to keep LockMaster opaque +class LockMasterImpl { + // STATIC variables / methods + + // A map from objects that are locked (or were locked), to information on their mutex + static std::map mutexes; + // A mutex used for the mutexes map + static uv_mutex_t mapMutex; + + // A libuv key used to store the current thread-specific LockMasterImpl instance + static uv_key_t currentLockMasterKey; + + // Cleans up any mutexes that are not currently used + static NAN_GC_CALLBACK(CleanupMutexes); + +public: + static void Initialize(); + + // INSTANCE variables / methods + +private: + // The set of objects this LockMaster is responsible for locking + std::set objectsToLock; + + // Mutexes locked by this LockMaster on construction and unlocked on 
destruction + std::vector GetMutexes(int useCountDelta); + void Register(); + void Unregister(); + +public: + static LockMasterImpl *CurrentLockMasterImpl() { + return (LockMasterImpl *)uv_key_get(¤tLockMasterKey); + } + static LockMaster::Diagnostics GetDiagnostics(); + + LockMasterImpl() { + Register(); + } + + ~LockMasterImpl() { + Unregister(); + Unlock(true); + } + + void ObjectToLock(const void *objectToLock) { + objectsToLock.insert(objectToLock); + } + + void Lock(bool acquireMutexes); + void Unlock(bool releaseMutexes); +}; + +std::map LockMasterImpl::mutexes; +uv_mutex_t LockMasterImpl::mapMutex; +uv_key_t LockMasterImpl::currentLockMasterKey; + +void LockMasterImpl::Initialize() { + uv_mutex_init(&mapMutex); + uv_key_create(¤tLockMasterKey); + Nan::AddGCEpilogueCallback(CleanupMutexes); +} + +NAN_GC_CALLBACK(LockMasterImpl::CleanupMutexes) { + // skip cleanup if thread safety is disabled + // this means that turning thread safety on and then off + // could result in remaining mutexes - but they would get cleaned up + // if thread safety is turned on again + if (LockMaster::GetStatus() == LockMaster::Disabled) { + return; + } + + uv_mutex_lock(&mapMutex); + + for (auto it = mutexes.begin(); it != mutexes.end(); ) + { + uv_mutex_t *mutex = it->second.mutex; + unsigned useCount = it->second.useCount; + // if the mutex is not used by any LockMasters, + // we can destroy it + if (!useCount) { + uv_mutex_destroy(mutex); + free(mutex); + auto to_erase = it; + it++; + mutexes.erase(to_erase); + } else { + it++; + } + } + + uv_mutex_unlock(&mapMutex); +} + +void LockMaster::Initialize() { + LockMasterImpl::Initialize(); +} + +std::vector LockMasterImpl::GetMutexes(int useCountDelta) { + std::vector objectMutexes; + + uv_mutex_lock(&mapMutex); + + for (auto object : objectsToLock) { + if(object) { + // ensure we have an initialized mutex for each object + auto mutexIt = mutexes.find(object); + if(mutexIt == mutexes.end()) { + mutexIt = mutexes.insert( + 
std::make_pair( + object, + ObjectInfo((uv_mutex_t *)malloc(sizeof(uv_mutex_t)), 0U) + ) + ).first; + uv_mutex_init(mutexIt->second.mutex); + } + + objectMutexes.push_back(mutexIt->second.mutex); + mutexIt->second.useCount += useCountDelta; + } + } + + uv_mutex_unlock(&mapMutex); + + return objectMutexes; +} + +void LockMasterImpl::Register() { + uv_key_set(¤tLockMasterKey, this); +} + +void LockMasterImpl::Unregister() { + uv_key_set(¤tLockMasterKey, NULL); +} + +void LockMasterImpl::Lock(bool acquireMutexes) { + std::vector objectMutexes = GetMutexes(acquireMutexes * 1); + + auto alreadyLocked = objectMutexes.end(); + + // we will attempt to lock all the mutexes at the same time to avoid deadlocks + // note in most cases we are locking 0 or 1 mutexes. more than 1 implies + // passing objects with different repos/owners in the same call. + std::vector::iterator it; + do { + // go through all the mutexes and try to lock them + for(it = objectMutexes.begin(); it != objectMutexes.end(); it++) { + // if we already locked this mutex in a previous pass via uv_mutex_lock, + // we don't need to lock it again + if (it == alreadyLocked) { + continue; + } + // first, try to lock (non-blocking) + bool failure = uv_mutex_trylock(*it); + if(failure) { + // we have failed to lock a mutex... unlock everything we have locked + std::for_each(objectMutexes.begin(), it, uv_mutex_unlock); + if (alreadyLocked > it && alreadyLocked != objectMutexes.end()) { + uv_mutex_unlock(*alreadyLocked); + } + // now do a blocking lock on what we couldn't lock + uv_mutex_lock(*it); + // mark that we have already locked this one + // if there are more mutexes than this one, we will go back to locking everything + alreadyLocked = it; + break; + } + } + } while(it != objectMutexes.end()); +} + +void LockMasterImpl::Unlock(bool releaseMutexes) { + // Get the mutexes but don't decrement their use count until after we've + // unlocked them all. 
+ std::vector objectMutexes = GetMutexes(0); + + std::for_each(objectMutexes.begin(), objectMutexes.end(), uv_mutex_unlock); + + GetMutexes(releaseMutexes * -1); +} + +LockMaster::Diagnostics LockMasterImpl::GetDiagnostics() { + LockMaster::Diagnostics diagnostics; + uv_mutex_lock(&LockMasterImpl::mapMutex); + diagnostics.storedMutexesCount = mutexes.size(); + uv_mutex_unlock(&LockMasterImpl::mapMutex); + return diagnostics; +} + +// LockMaster + +void LockMaster::ConstructorImpl() { + impl = new LockMasterImpl(); +} + +void LockMaster::DestructorImpl() { + delete impl; +} + +void LockMaster::ObjectToLock(const void *objectToLock) { + impl->ObjectToLock(objectToLock); +} + +void LockMaster::ObjectsToLockAdded() { + impl->Lock(true); +} + +LockMaster::Diagnostics LockMaster::GetDiagnostics() { + return LockMasterImpl::GetDiagnostics(); +} + +// LockMaster::TemporaryUnlock + +void LockMaster::TemporaryUnlock::ConstructorImpl() { + impl = LockMasterImpl::CurrentLockMasterImpl(); + if(impl) { + impl->Unlock(false); + } +} + +void LockMaster::TemporaryUnlock::DestructorImpl() { + impl->Lock(false); +} + +LockMaster::Status LockMaster::status = LockMaster::Disabled; diff --git a/generate/templates/manual/src/nodegit_wrapper.cc b/generate/templates/manual/src/nodegit_wrapper.cc new file mode 100644 index 000000000..ea6694425 --- /dev/null +++ b/generate/templates/manual/src/nodegit_wrapper.cc @@ -0,0 +1,114 @@ +template +NodeGitWrapper::NodeGitWrapper(typename Traits::cType *raw, bool selfFreeing, v8::Local owner) { + if (!owner.IsEmpty()) { + // if we have an owner, there are two options - either we duplicate the raw object + // (so we own the duplicate, and can self-free it) + // or we keep a handle on the owner so it doesn't get garbage collected + // while this wrapper is accessible + if(Traits::isDuplicable) { + Traits::duplicate(&this->raw, raw); + selfFreeing = true; + } else { + this->owner.Reset(owner); + this->raw = raw; + selfFreeing = false; + } + } else { + 
this->raw = raw; + } + this->selfFreeing = selfFreeing; + + if (selfFreeing) { + SelfFreeingInstanceCount++; + } else { + NonSelfFreeingConstructedCount++; + } +} + +template +NodeGitWrapper::NodeGitWrapper(const char *error) { + selfFreeing = false; + raw = NULL; + Nan::ThrowError(error); +} + +template +NodeGitWrapper::~NodeGitWrapper() { + if(Traits::isFreeable && selfFreeing) { + Traits::free(raw); + SelfFreeingInstanceCount--; + raw = NULL; + } +} + +template +NAN_METHOD(NodeGitWrapper::JSNewFunction) { + cppClass * instance; + + if (info.Length() == 0 || !info[0]->IsExternal()) { + Nan::TryCatch tryCatch; + instance = new cppClass(); + // handle the case where the default constructor is not supported + if(tryCatch.HasCaught()) { + delete instance; + tryCatch.ReThrow(); + return; + } + } else { + instance = new cppClass(static_cast( + Local::Cast(info[0])->Value()), + Nan::To(info[1]).FromJust(), + info.Length() >= 3 && !info[2].IsEmpty() && info[2]->IsObject() ? info[2]->ToObject() : Local() + ); + } + + instance->Wrap(info.This()); + info.GetReturnValue().Set(info.This()); +} + +template +v8::Local NodeGitWrapper::New(const typename Traits::cType *raw, bool selfFreeing, v8::Local owner) { + Nan::EscapableHandleScope scope; + Local argv[3] = { Nan::New((void *)raw), Nan::New(selfFreeing), owner }; + return scope.Escape( + Nan::NewInstance( + Nan::New(constructor_template), + owner.IsEmpty() ? 
2 : 3, // passing an empty handle as part of the arguments causes a crash + argv + ).ToLocalChecked()); +} + +template +typename Traits::cType *NodeGitWrapper::GetValue() { + return raw; +} + +template +void NodeGitWrapper::ClearValue() { + raw = NULL; +} + +template +Nan::Persistent NodeGitWrapper::constructor_template; + +template +int NodeGitWrapper::SelfFreeingInstanceCount; + +template +int NodeGitWrapper::NonSelfFreeingConstructedCount; + +template +NAN_METHOD(NodeGitWrapper::GetSelfFreeingInstanceCount) { + info.GetReturnValue().Set(SelfFreeingInstanceCount); +} + +template +NAN_METHOD(NodeGitWrapper::GetNonSelfFreeingConstructedCount) { + info.GetReturnValue().Set(NonSelfFreeingConstructedCount); +} + +template +void NodeGitWrapper::InitializeTemplate(v8::Local &tpl) { + Nan::SetMethod(tpl, "getSelfFreeingInstanceCount", GetSelfFreeingInstanceCount); + Nan::SetMethod(tpl, "getNonSelfFreeingConstructedCount", GetNonSelfFreeingConstructedCount); +} diff --git a/generate/templates/manual/src/promise_completion.cc b/generate/templates/manual/src/promise_completion.cc new file mode 100644 index 000000000..4b003d80f --- /dev/null +++ b/generate/templates/manual/src/promise_completion.cc @@ -0,0 +1,103 @@ +#include "../include/promise_completion.h" + +Nan::Persistent PromiseCompletion::newFn; +Nan::Persistent PromiseCompletion::promiseFulfilled; +Nan::Persistent PromiseCompletion::promiseRejected; + +// initializes the persistent handles for NAN_METHODs +void PromiseCompletion::InitializeComponent() { + v8::Local newTemplate = Nan::New(New); + newTemplate->InstanceTemplate()->SetInternalFieldCount(1); + newFn.Reset(newTemplate->GetFunction()); + + promiseFulfilled.Reset(Nan::New(PromiseFulfilled)->GetFunction()); + promiseRejected.Reset(Nan::New(PromiseRejected)->GetFunction()); +} + +bool PromiseCompletion::ForwardIfPromise(v8::Local result, AsyncBaton *baton, Callback callback) +{ + Nan::HandleScope scope; + + // check if the result is a promise + if 
(result->IsObject()) { + Nan::MaybeLocal maybeThenProp = Nan::Get(result->ToObject(), Nan::New("then").ToLocalChecked()); + if (!maybeThenProp.IsEmpty()) { + v8::Local thenProp = maybeThenProp.ToLocalChecked(); + if(thenProp->IsFunction()) { + // we can be reasonably certain that the result is a promise + + // create a new v8 instance of PromiseCompletion + v8::Local object = Nan::NewInstance(Nan::New(newFn)).ToLocalChecked(); + + // set up the native PromiseCompletion object + PromiseCompletion *promiseCompletion = ObjectWrap::Unwrap(object); + promiseCompletion->Setup(thenProp.As(), result, baton, callback); + + return true; + } + } + } + + return false; +} + +// creates a new instance of PromiseCompletion, wrapped in a v8 object +NAN_METHOD(PromiseCompletion::New) { + PromiseCompletion *promiseCompletion = new PromiseCompletion(); + promiseCompletion->Wrap(info.This()); + info.GetReturnValue().Set(info.This()); +} + +// sets up a Promise to forward the promise result via the baton and callback +void PromiseCompletion::Setup(v8::Local thenFn, v8::Local result, AsyncBaton *baton, Callback callback) { + this->callback = callback; + this->baton = baton; + + v8::Local promise = result->ToObject(); + + v8::Local thisHandle = handle(); + + v8::Local argv[2] = { + Bind(promiseFulfilled, thisHandle), + Bind(promiseRejected, thisHandle) + }; + + // call the promise's .then method with resolve and reject callbacks + Nan::Callback(thenFn).Call(promise, 2, argv); +} + +// binds an object to be the context of the function. +// there might be a better way to do this than calling Function.bind... 
+v8::Local PromiseCompletion::Bind(Nan::Persistent &function, v8::Local object) { + Nan::EscapableHandleScope scope; + + v8::Local bind = + Nan::Get(Nan::New(function), Nan::New("bind").ToLocalChecked()) + .ToLocalChecked().As(); + + v8::Local argv[1] = { object }; + + return scope.Escape(bind->Call(Nan::New(function), 1, argv)); +} + +// calls the callback stored in the PromiseCompletion, passing the baton that +// was provided in construction +void PromiseCompletion::CallCallback(bool isFulfilled, const Nan::FunctionCallbackInfo &info) { + v8::Local resultOfPromise; + + if (info.Length() > 0) { + resultOfPromise = info[0]; + } + + PromiseCompletion *promiseCompletion = ObjectWrap::Unwrap(info.This()->ToObject()); + + (*promiseCompletion->callback)(isFulfilled, promiseCompletion->baton, resultOfPromise); +} + +NAN_METHOD(PromiseCompletion::PromiseFulfilled) { + CallCallback(true, info); +} + +NAN_METHOD(PromiseCompletion::PromiseRejected) { + CallCallback(false, info); +} diff --git a/generate/templates/manual/src/str_array_converter.cc b/generate/templates/manual/src/str_array_converter.cc new file mode 100644 index 000000000..c66f901c3 --- /dev/null +++ b/generate/templates/manual/src/str_array_converter.cc @@ -0,0 +1,64 @@ +#include +#include +#include +#include + +#include "../include/str_array_converter.h" +#include "git2/strarray.h" + +using namespace v8; +using namespace node; + +git_strarray *StrArrayConverter::Convert(Local val) { + if (!val->BooleanValue()) { + return NULL; + } + else if (val->IsArray()) { + return ConvertArray(Array::Cast(*val)); + } + else if (val->IsString() || val->IsStringObject()) { + return ConvertString(val->ToString()); + } + else { + return NULL; + } +} + +git_strarray * StrArrayConverter::AllocStrArray(const size_t count) { + const size_t size = sizeof(git_strarray) + (sizeof(char*) * count); + uint8_t* memory = reinterpret_cast(malloc(size)); + git_strarray *result = reinterpret_cast(memory); + result->count = count; + 
result->strings = reinterpret_cast(memory + sizeof(git_strarray)); + return result; +} + +git_strarray *StrArrayConverter::ConvertArray(Array *val) { + git_strarray *result = AllocStrArray(val->Length()); + + for(size_t i = 0; i < result->count; i++) { + Nan::Utf8String entry(val->Get(i)); + result->strings[i] = strdup(*entry); + } + + return result; +} + +git_strarray* StrArrayConverter::ConvertString(Local val) { + char *strings[1]; + Nan::Utf8String utf8String(val); + + strings[0] = *utf8String; + + return ConstructStrArray(1, strings); +} + +git_strarray *StrArrayConverter::ConstructStrArray(int argc, char** argv) { + git_strarray *result = AllocStrArray(argc); + + for(size_t i = 0; i < result->count; i++) { + result->strings[i] = strdup(argv[i]); + } + + return result; +} diff --git a/generate/templates/manual/src/thread_pool.cc b/generate/templates/manual/src/thread_pool.cc new file mode 100644 index 000000000..abf7a29a8 --- /dev/null +++ b/generate/templates/manual/src/thread_pool.cc @@ -0,0 +1,85 @@ +#include "../include/thread_pool.h" + +ThreadPool::ThreadPool(int numberOfThreads, uv_loop_t *loop) { + uv_mutex_init(&workMutex); + uv_sem_init(&workSemaphore, 0); + + uv_async_init(loop, &loopAsync, RunLoopCallbacks); + loopAsync.data = this; + uv_unref((uv_handle_t *)&loopAsync); + uv_mutex_init(&loopMutex); + + workInProgressCount = 0; + + for(int i=0; i(threadPool)->RunEventQueue(); +} + +void ThreadPool::RunEventQueue() { + for ( ; ; ) { + // wait until there is work to do + uv_sem_wait(&workSemaphore); + uv_mutex_lock(&workMutex); + // the semaphore should guarantee that queue is not empty + Work work = workQueue.front(); + workQueue.pop(); + uv_mutex_unlock(&workMutex); + + // perform the queued work + (*work.workCallback)(work.data); + + // schedule the callback on the loop + uv_mutex_lock(&loopMutex); + loopQueue.push(work); + uv_mutex_unlock(&loopMutex); + uv_async_send(&loopAsync); + } +} + +void ThreadPool::RunLoopCallbacks(uv_async_t* handle) { + 
static_cast(handle->data)->RunLoopCallbacks(); +} + +void ThreadPool::RunLoopCallbacks() { + // uv_async_send can coalesce calls, so we are not guaranteed one + // RunLoopCallbacks per uv_async_send call + // so we always process the entire loopQueue + int callbacksCompleted = 0; + uv_mutex_lock(&loopMutex); + while(!loopQueue.empty()) { + Work work = loopQueue.front(); + loopQueue.pop(); + uv_mutex_unlock(&loopMutex); + // perform the queued loop callback + (*work.loopCallback)(work.data); + callbacksCompleted++; + uv_mutex_lock(&loopMutex); + } + uv_mutex_unlock(&loopMutex); + + uv_mutex_lock(&workMutex); + // if there is no ongoing work / completion processing, node doesn't need + // to be prevented from terminating + workInProgressCount -= callbacksCompleted; + if(!workInProgressCount) { + uv_unref((uv_handle_t *)&loopAsync); + } + uv_mutex_unlock(&workMutex); +} diff --git a/generate/templates/manual/src/wrapper.cc b/generate/templates/manual/src/wrapper.cc new file mode 100644 index 000000000..3f2ea6121 --- /dev/null +++ b/generate/templates/manual/src/wrapper.cc @@ -0,0 +1,80 @@ +/** + * This code is auto-generated; unless you know what you're doing, do not modify! 
+ **/ +#include +#include +#include +#include + +#include "../include/wrapper.h" +#include "node_buffer.h" + +using namespace v8; +using namespace node; + +Wrapper::Wrapper(void *raw) { + this->raw = raw; +} + +void Wrapper::InitializeComponent(Local target) { + Nan::HandleScope scope; + + Local tpl = Nan::New(JSNewFunction); + + tpl->InstanceTemplate()->SetInternalFieldCount(1); + tpl->SetClassName(Nan::New("Wrapper").ToLocalChecked()); + + Nan::SetPrototypeMethod(tpl, "toBuffer", ToBuffer); + + constructor_template.Reset(tpl); + Nan::Set(target, Nan::New("Wrapper").ToLocalChecked(), Nan::GetFunction(tpl).ToLocalChecked()); +} + +NAN_METHOD(Wrapper::JSNewFunction) { + + if (info.Length() == 0 || !info[0]->IsExternal()) { + return Nan::ThrowError("void * is required."); + } + + Wrapper* object = new Wrapper(External::Cast(*info[0])->Value()); + object->Wrap(info.This()); + + info.GetReturnValue().Set(info.This()); +} + +Local Wrapper::New(const void *raw) { + Nan::EscapableHandleScope scope; + + Local argv[1] = { Nan::New((void *)raw) }; + Local instance; + Local constructorHandle = Nan::New(constructor_template); + instance = Nan::NewInstance(Nan::GetFunction(constructorHandle).ToLocalChecked(), 1, argv).ToLocalChecked(); + + return scope.Escape(instance); +} + +void *Wrapper::GetValue() { + return this->raw; +} + +NAN_METHOD(Wrapper::ToBuffer) { + + if(info.Length() == 0 || !info[0]->IsNumber()) { + return Nan::ThrowError("Number is required."); + } + + int len = info[0]->ToNumber()->Value(); + + Local bufferConstructor = Local::Cast( + Nan::Get(Nan::GetCurrentContext()->Global(), Nan::New("Buffer").ToLocalChecked()).ToLocalChecked()); + + Local constructorArgs[1] = { Nan::New(len) }; + Local nodeBuffer = Nan::NewInstance(bufferConstructor, 1, constructorArgs).ToLocalChecked(); + + std::memcpy(node::Buffer::Data(nodeBuffer), Nan::ObjectWrap::Unwrap(info.This())->GetValue(), len); + + info.GetReturnValue().Set(nodeBuffer); +} + + +Nan::Persistent 
Wrapper::constructor_template; diff --git a/generate/templates/partials/async_function.cc b/generate/templates/partials/async_function.cc new file mode 100644 index 000000000..3d3227d4a --- /dev/null +++ b/generate/templates/partials/async_function.cc @@ -0,0 +1,277 @@ + +{%partial doc .%} +NAN_METHOD({{ cppClassName }}::{{ cppFunctionName }}) { + {%partial guardArguments .%} + if (info.Length() == {{args|jsArgsCount}} || !info[{{args|jsArgsCount}}]->IsFunction()) { + return Nan::ThrowError("Callback is required and must be a Function."); + } + + {{ cppFunctionName }}Baton* baton = new {{ cppFunctionName }}Baton; + + baton->error_code = GIT_OK; + baton->error = NULL; + + {%each args|argsInfo as arg %} + {%if arg.globalPayload %} + {{ cppFunctionName }}_globalPayload* globalPayload = new {{ cppFunctionName }}_globalPayload; + {%endif%} + {%if arg.cppClassName == "GitBuf" %} + baton->{{arg.name}} = ({{ arg.cType }})malloc(sizeof({{ arg.cType|replace '*' '' }}));; + baton->{{arg.name}}->ptr = NULL; + baton->{{arg.name}}->size = baton->{{arg.name}}->asize = 0; + {%endif%} + {%endeach%} + + {%each args|argsInfo as arg %} + {%if not arg.isReturn %} + {%if arg.isSelf %} + baton->{{ arg.name }} = Nan::ObjectWrap::Unwrap<{{ arg.cppClassName }}>(info.This())->GetValue(); + {%elsif arg.isCallbackFunction %} + if (!info[{{ arg.jsArg }}]->IsFunction()) { + baton->{{ arg.name }} = NULL; + {%if arg.payload.globalPayload %} + globalPayload->{{ arg.name }} = NULL; + {%else%} + baton->{{ arg.payload.name }} = NULL; + {%endif%} + } + else { + baton->{{ arg.name}} = {{ cppFunctionName }}_{{ arg.name }}_cppCallback; + {%if arg.payload.globalPayload %} + globalPayload->{{ arg.name }} = new Nan::Callback(info[{{ arg.jsArg }}].As()); + {%else%} + baton->{{ arg.payload.name }} = new Nan::Callback(info[{{ arg.jsArg }}].As()); + {%endif%} + } + {%elsif arg.payloadFor %} + {%if arg.globalPayload %} + baton->{{ arg.name }} = globalPayload; + {%endif%} + {%elsif arg.name %} + {%partial 
convertFromV8 arg%} + {%if not arg.payloadFor %} + baton->{{ arg.name }} = from_{{ arg.name }}; + {%if arg | isOid %} + baton->{{ arg.name }}NeedsFree = info[{{ arg.jsArg }}]->IsString(); + {%endif%} + {%endif%} + {%endif%} + {%elsif arg.shouldAlloc %} + baton->{{ arg.name }} = ({{ arg.cType }})malloc(sizeof({{ arg.cType|replace '*' '' }})); + {%endif%} + {%endeach%} + + Nan::Callback *callback = new Nan::Callback(Local::Cast(info[{{args|jsArgsCount}}])); + {{ cppFunctionName }}Worker *worker = new {{ cppFunctionName }}Worker(baton, callback); + {%each args|argsInfo as arg %} + {%if not arg.isReturn %} + {%if arg.isSelf %} + worker->SaveToPersistent("{{ arg.name }}", info.This()); + {%elsif not arg.isCallbackFunction %} + if (!info[{{ arg.jsArg }}]->IsUndefined() && !info[{{ arg.jsArg }}]->IsNull()) + worker->SaveToPersistent("{{ arg.name }}", info[{{ arg.jsArg }}]->ToObject()); + {%endif%} + {%endif%} + {%endeach%} + + AsyncLibgit2QueueWorker(worker); + return; +} + +void {{ cppClassName }}::{{ cppFunctionName }}Worker::Execute() { + giterr_clear(); + + { + LockMaster lockMaster(/*asyncAction: */true{%each args|argsInfo as arg %} + {%if arg.cType|isPointer%}{%if not arg.cType|isDoublePointer%} + ,baton->{{ arg.name }} + {%endif%}{%endif%} + {%endeach%}); + + {%if .|hasReturnType %} + {{ return.cType }} result = {{ cFunctionName }}( + {%else%} + {{ cFunctionName }}( + {%endif%} + {%-- Insert Function Arguments --%} + {%each args|argsInfo as arg %} + {%-- turn the pointer into a ref --%} + {%if arg.isReturn|and arg.cType|isDoublePointer %}&{%endif%}baton->{{ arg.name }}{%if not arg.lastArg %},{%endif%} + + {%endeach%} + ); + + {%if return.isResultOrError %} + baton->error_code = result; + if (result < GIT_OK && giterr_last() != NULL) { + baton->error = git_error_dup(giterr_last()); + } + + {%elsif return.isErrorCode %} + baton->error_code = result; + + if (result != GIT_OK && giterr_last() != NULL) { + baton->error = git_error_dup(giterr_last()); + } + + {%elsif not 
return.cType == 'void' %} + + baton->result = result; + + {%endif%} + } +} + +void {{ cppClassName }}::{{ cppFunctionName }}Worker::HandleOKCallback() { + {%if return.isResultOrError %} + if (baton->error_code >= GIT_OK) { + {%else%} + if (baton->error_code == GIT_OK) { + {%endif%} + {%if return.isResultOrError %} + Local result = Nan::New(baton->error_code); + + {%elsif not .|returnsCount %} + Local result = Nan::Undefined(); + {%else%} + Local to; + {%if .|returnsCount > 1 %} + Local result = Nan::New(); + {%endif%} + {%each .|returnsInfo 0 1 as _return %} + {%partial convertToV8 _return %} + {%if .|returnsCount > 1 %} + Nan::Set(result, Nan::New("{{ _return.returnNameOrName }}").ToLocalChecked(), to); + {%endif%} + {%endeach%} + {%if .|returnsCount == 1 %} + Local result = to; + {%endif%} + {%endif%} + Local argv[2] = { + Nan::Null(), + result + }; + callback->Call(2, argv); + } else { + if (baton->error) { + Local argv[1] = { + Nan::Error(baton->error->message) + }; + callback->Call(1, argv); + if (baton->error->message) + free((void *)baton->error->message); + free((void *)baton->error); + } else if (baton->error_code < 0) { + std::queue< Local > workerArguments; +{%each args|argsInfo as arg %} + {%if not arg.isReturn %} + {%if not arg.isSelf %} + {%if not arg.isCallbackFunction %} + workerArguments.push(GetFromPersistent("{{ arg.name }}")); + {%endif%} + {%endif%} + {%endif%} +{%endeach%} + bool callbackFired = false; + while(!workerArguments.empty()) { + Local node = workerArguments.front(); + workerArguments.pop(); + + if ( + !node->IsObject() + || node->IsArray() + || node->IsBooleanObject() + || node->IsDate() + || node->IsFunction() + || node->IsNumberObject() + || node->IsRegExp() + || node->IsStringObject() + ) { + continue; + } + + Local nodeObj = node->ToObject(); + Local checkValue = GetPrivate(nodeObj, Nan::New("NodeGitPromiseError").ToLocalChecked()); + + if (!checkValue.IsEmpty() && !checkValue->IsNull() && !checkValue->IsUndefined()) { + Local 
argv[1] = { + checkValue->ToObject() + }; + callback->Call(1, argv); + callbackFired = true; + break; + } + + Local properties = nodeObj->GetPropertyNames(); + for (unsigned int propIndex = 0; propIndex < properties->Length(); ++propIndex) { + Local propName = properties->Get(propIndex)->ToString(); + Local nodeToQueue = nodeObj->Get(propName); + if (!nodeToQueue->IsUndefined()) { + workerArguments.push(nodeToQueue); + } + } + } + + if (!callbackFired) { + Local err = Nan::Error("Method {{ jsFunctionName }} has thrown an error.")->ToObject(); + err->Set(Nan::New("errno").ToLocalChecked(), Nan::New(baton->error_code)); + Local argv[1] = { + err + }; + callback->Call(1, argv); + } + } else { + callback->Call(0, NULL); + } + + {%each args|argsInfo as arg %} + {%if arg.shouldAlloc %} + {%if not arg.isCppClassStringOrArray %} + {%elsif arg | isOid %} + if (baton->{{ arg.name}}NeedsFree) { + baton->{{ arg.name}}NeedsFree = false; + free((void*)baton->{{ arg.name }}); + } + {%elsif arg.isCallbackFunction %} + {%if not arg.payload.globalPayload %} + delete baton->{{ arg.payload.name }}; + {%endif%} + {%elsif arg.globalPayload %} + delete ({{ cppFunctionName}}_globalPayload*)baton->{{ arg.name }}; + {%else%} + free((void*)baton->{{ arg.name }}); + {%endif%} + {%endif%} + {%endeach%} + } + + {%each args|argsInfo as arg %} + {%if arg.isCppClassStringOrArray %} + {%if arg.freeFunctionName %} + {{ arg.freeFunctionName }}(baton->{{ arg.name }}); + {%elsif not arg.isConst%} + free((void *)baton->{{ arg.name }}); + {%endif%} + {%elsif arg | isOid %} + if (baton->{{ arg.name}}NeedsFree) { + baton->{{ arg.name}}NeedsFree = false; + free((void *)baton->{{ arg.name }}); + } + {%elsif arg.isCallbackFunction %} + {%if not arg.payload.globalPayload %} + delete baton->{{ arg.payload.name }}; + {%endif%} + {%elsif arg.globalPayload %} + delete ({{ cppFunctionName}}_globalPayload*)baton->{{ arg.name }}; + {%endif%} + {%if arg.cppClassName == "GitBuf" %} + git_buf_free(baton->{{ arg.name 
}}); + free((void *)baton->{{ arg.name }}); + {%endif%} + {%endeach%} + + delete baton; +} + +{%partial callbackHelpers .%} diff --git a/generate/templates/partials/callback_helpers.cc b/generate/templates/partials/callback_helpers.cc new file mode 100644 index 000000000..c4b435787 --- /dev/null +++ b/generate/templates/partials/callback_helpers.cc @@ -0,0 +1,138 @@ +{%each args as cbFunction %} + {%if cbFunction.isCallbackFunction %} + +{{ cbFunction.return.type }} {{ cppClassName }}::{{ cppFunctionName }}_{{ cbFunction.name }}_cppCallback ( + {% each cbFunction.args|argsInfo as arg %} + {{ arg.cType }} {{ arg.name}}{% if not arg.lastArg %},{% endif %} + {% endeach %} +) { + {{ cppFunctionName }}_{{ cbFunction.name|titleCase }}Baton* baton = + new {{ cppFunctionName }}_{{ cbFunction.name|titleCase }}Baton({{ cbFunction.return.noResults }}); + + {% each cbFunction.args|argsInfo as arg %} + baton->{{ arg.name }} = {{ arg.name }}; + {% endeach %} + + return baton->ExecuteAsync((uv_async_cb) {{ cppFunctionName }}_{{ cbFunction.name }}_async); +} + +void {{ cppClassName }}::{{ cppFunctionName }}_{{ cbFunction.name }}_async(uv_async_t* req, int status) { + Nan::HandleScope scope; + + {{ cppFunctionName }}_{{ cbFunction.name|titleCase }}Baton* baton = static_cast<{{ cppFunctionName }}_{{ cbFunction.name|titleCase }}Baton*>(req->data); + + {% each cbFunction.args|argsInfo as arg %} + {% if arg | isPayload %} + {% if cbFunction.payload.globalPayload %} + Nan::Callback* callback = (({{ cppFunctionName }}_globalPayload*)baton->{{ arg.name }})->{{ cbFunction.name }}; + {% else %} + Nan::Callback* callback = (Nan::Callback *)baton->{{ arg.name }}; + {% endif %} + {% endif %} + {% endeach %} + + Local argv[{{ cbFunction.args|jsArgsCount }}] = { + {% each cbFunction.args|argsInfo as arg %} + {% if arg | isPayload %} + {%-- payload is always the last arg --%} + // payload is null because we can use closure scope in javascript + Nan::Undefined() + {% elsif arg.isJsArg %} + {% if 
arg.isEnum %} + Nan::New((int)baton->{{ arg.name }}), + {% elsif arg.isLibgitType %} + {{ arg.cppClassName }}::New(baton->{{ arg.name }}, false), + {% elsif arg.cType == "size_t" %} + // HACK: NAN should really have an overload for Nan::New to support size_t + Nan::New((unsigned int)baton->{{ arg.name }}), + {% elsif arg.cppClassName == 'String' %} + Nan::New(baton->{{ arg.name }}).ToLocalChecked(), + {% else %} + Nan::New(baton->{{ arg.name }}), + {% endif %} + {% endif %} + {% endeach %} + }; + + Nan::TryCatch tryCatch; + Local result = callback->Call({{ cbFunction.args|jsArgsCount }}, argv); + + uv_close((uv_handle_t*) &baton->req, NULL); + + if(PromiseCompletion::ForwardIfPromise(result, baton, {{ cppFunctionName }}_{{ cbFunction.name }}_promiseCompleted)) { + return; + } + + {% each cbFunction|returnsInfo false true as _return %} + if (result.IsEmpty() || result->IsNativeError()) { + baton->result = {{ cbFunction.return.error }}; + } + else if (!result->IsNull() && !result->IsUndefined()) { + {% if _return.isOutParam %} + {{ _return.cppClassName }}* wrapper = Nan::ObjectWrap::Unwrap<{{ _return.cppClassName }}>(result->ToObject()); + wrapper->selfFreeing = false; + + *baton->{{ _return.name }} = wrapper->GetValue(); + baton->result = {{ cbFunction.return.success }}; + {% else %} + if (result->IsNumber()) { + baton->result = (int)result->ToNumber()->Value(); + } + else { + baton->result = baton->defaultResult; + } + {% endif %} + } + else { + baton->result = baton->defaultResult; + } + {% endeach %} + + baton->done = true; +} + +void {{ cppClassName }}::{{ cppFunctionName }}_{{ cbFunction.name }}_promiseCompleted(bool isFulfilled, AsyncBaton *_baton, v8::Local result) { + Nan::HandleScope scope; + + {{ cppFunctionName }}_{{ cbFunction.name|titleCase }}Baton* baton = static_cast<{{ cppFunctionName }}_{{ cbFunction.name|titleCase }}Baton*>(_baton); + + if (isFulfilled) { + {% each cbFunction|returnsInfo false true as _return %} + if (result.IsEmpty() || 
result->IsNativeError()) { + baton->result = {{ cbFunction.return.error }}; + } + else if (!result->IsNull() && !result->IsUndefined()) { + {% if _return.isOutParam %} + {{ _return.cppClassName }}* wrapper = Nan::ObjectWrap::Unwrap<{{ _return.cppClassName }}>(result->ToObject()); + wrapper->selfFreeing = false; + + *baton->{{ _return.name }} = wrapper->GetValue(); + baton->result = {{ cbFunction.return.success }}; + {% else %} + if (result->IsNumber()) { + baton->result = (int)result->ToNumber()->Value(); + } + else { + baton->result = baton->defaultResult; + } + {% endif %} + } + else { + baton->result = baton->defaultResult; + } + {% endeach %} + } + else { + // promise was rejected + {{ cppClassName }}* instance = static_cast<{{ cppClassName }}*>(baton->{% each cbFunction.args|argsInfo as arg %} + {% if arg.payload == true %}{{arg.name}}{% elsif arg.lastArg %}{{arg.name}}{% endif %} + {% endeach %}); + Local parent = instance->handle(); + SetPrivate(parent, Nan::New("NodeGitPromiseError").ToLocalChecked(), result); + + baton->result = {{ cbFunction.return.error }}; + } + baton->done = true; +} + {%endif%} +{%endeach%} diff --git a/generate/templates/partials/convert_from_v8.cc b/generate/templates/partials/convert_from_v8.cc new file mode 100644 index 000000000..425e691f4 --- /dev/null +++ b/generate/templates/partials/convert_from_v8.cc @@ -0,0 +1,106 @@ +{%if not isPayload %} +// start convert_from_v8 block + {{ cType }} from_{{ name }}; + {%if isOptional | or isBoolean %} + + {%if cppClassName == 'GitStrarray'%} + {%-- Print nothing --%} + {% elsif cppClassName == 'GitBuf' %} + {%-- Print nothing --%} + {%else%} + if (info[{{ jsArg }}]->Is{{ cppClassName|cppToV8 }}()) { + {%endif%} + {%endif%} + {%if cppClassName == 'String'%} + + String::Utf8Value {{ name }}(info[{{ jsArg }}]->ToString()); + // malloc with one extra byte so we can add the terminating null character C-strings expect: + from_{{ name }} = ({{ cType }}) malloc({{ name }}.length() + 1); + // copy 
the characters from the nodejs string into our C-string (used instead of strdup or strcpy because nulls in + // the middle of strings are valid coming from nodejs): + memcpy((void *)from_{{ name }}, *{{ name }}, {{ name }}.length()); + // ensure the final byte of our new string is null, extra casts added to ensure compatibility with various C types + // used in the nodejs binding generation: + memset((void *)(((char *)from_{{ name }}) + {{ name }}.length()), 0, 1); + {%elsif cppClassName == 'GitStrarray' %} + + from_{{ name }} = StrArrayConverter::Convert(info[{{ jsArg }}]); + {%elsif cppClassName == 'GitBuf' %} + + from_{{ name }} = GitBufConverter::Convert(info[{{ jsArg }}]); + {%elsif cppClassName == 'Wrapper'%} + + String::Utf8Value {{ name }}(info[{{ jsArg }}]->ToString()); + // malloc with one extra byte so we can add the terminating null character C-strings expect: + from_{{ name }} = ({{ cType }}) malloc({{ name }}.length() + 1); + // copy the characters from the nodejs string into our C-string (used instead of strdup or strcpy because nulls in + // the middle of strings are valid coming from nodejs): + memcpy((void *)from_{{ name }}, *{{ name }}, {{ name }}.length()); + // ensure the final byte of our new string is null, extra casts added to ensure compatibility with various C types + // used in the nodejs binding generation: + memset((void *)(((char *)from_{{ name }}) + {{ name }}.length()), 0, 1); + {%elsif cppClassName == 'Array'%} + + Array *tmp_{{ name }} = Array::Cast(*info[{{ jsArg }}]); + from_{{ name }} = ({{ cType }})malloc(tmp_{{ name }}->Length() * sizeof({{ cType|replace '**' '*' }})); + for (unsigned int i = 0; i < tmp_{{ name }}->Length(); i++) { + {%-- + // FIXME: should recursively call convertFromv8. 
+ --%} + from_{{ name }}[i] = Nan::ObjectWrap::Unwrap<{{ arrayElementCppClassName }}>(tmp_{{ name }}->Get(Nan::New(static_cast(i)))->ToObject())->GetValue(); + } + {%elsif cppClassName == 'Function'%} + {%elsif cppClassName == 'Buffer'%} + + from_{{ name }} = Buffer::Data(info[{{ jsArg }}]->ToObject()); + {%elsif cppClassName|isV8Value %} + + {%if cType|isPointer %} + *from_{{ name }} = ({{ cType|unPointer }}) {{ cast }} {%if isEnum %}(int){%endif%} info[{{ jsArg }}]->To{{ cppClassName }}()->Value(); + {%else%} + from_{{ name }} = ({{ cType }}) {{ cast }} {%if isEnum %}(int){%endif%} info[{{ jsArg }}]->To{{ cppClassName }}()->Value(); + {%endif%} + {%elsif cppClassName == 'GitOid'%} + if (info[{{ jsArg }}]->IsString()) { + // Try and parse in a string to a git_oid + String::Utf8Value oidString(info[{{ jsArg }}]->ToString()); + git_oid *oidOut = (git_oid *)malloc(sizeof(git_oid)); + + if (git_oid_fromstr(oidOut, (const char *) strdup(*oidString)) != GIT_OK) { + free(oidOut); + + if (giterr_last()) { + return Nan::ThrowError(giterr_last()->message); + } else { + return Nan::ThrowError("Unknown Error"); + } + } + + {%if cType|isDoublePointer %} + from_{{ name }} = &oidOut; + {%else%} + from_{{ name }} = oidOut; + {%endif%} + } + else { + {%if cType|isDoublePointer %}*{%endif%}from_{{ name }} = Nan::ObjectWrap::Unwrap<{{ cppClassName }}>(info[{{ jsArg }}]->ToObject())->GetValue(); + } + {%else%} + {%if cType|isDoublePointer %}*{%endif%}from_{{ name }} = Nan::ObjectWrap::Unwrap<{{ cppClassName }}>(info[{{ jsArg }}]->ToObject())->GetValue(); + {%endif%} + + {%if isBoolean %} + } + else { + from_{{ name }} = info[{{ jsArg }}]->IsTrue() ? 
1 : 0; + } + {%elsif isOptional %} + {%if cppClassName != 'GitStrarray'%} + } + else { + from_{{ name }} = 0; + } + {%endif%} + {%endif%} +// end convert_from_v8 block +{%endif%} diff --git a/generate/templates/partials/convert_to_v8.cc b/generate/templates/partials/convert_to_v8.cc new file mode 100644 index 000000000..d9b1d8830 --- /dev/null +++ b/generate/templates/partials/convert_to_v8.cc @@ -0,0 +1,72 @@ +// start convert_to_v8 block +{% if cppClassName == 'String' %} + if ({{= parsedName =}}){ + {% if size %} + to = Nan::New({{= parsedName =}}, {{ size }}).ToLocalChecked(); + {% elsif cType == 'char **' %} + to = Nan::New(*{{= parsedName =}}).ToLocalChecked(); + {% else %} + to = Nan::New({{= parsedName =}}).ToLocalChecked(); + {% endif %} + } + else { + to = Nan::Null(); + } + + {% if freeFunctionName %} + {{ freeFunctionName }}({{= parsedName =}}); + {% endif %} + +{% elsif cppClassName|isV8Value %} + + {% if isCppClassIntType %} + to = Nan::New<{{ cppClassName }}>(({{ parsedClassName }}){{= parsedName =}}); + {% else %} + to = Nan::New<{{ cppClassName }}>({% if needsDereference %}*{% endif %}{{= parsedName =}}); + {% endif %} + +{% elsif cppClassName == 'External' %} + + to = Nan::New((void *){{= parsedName =}}); + +{% elsif cppClassName == 'Array' %} + + {%-- // FIXME this is not general purpose enough. 
--%} + {% if size %} + Local tmpArray = Nan::New({{= parsedName =}}->{{ size }}); + for (unsigned int i = 0; i < {{= parsedName =}}->{{ size }}; i++) { + Nan::Set(tmpArray, Nan::New(i), Nan::New({{= parsedName =}}->{{ key }}[i]).ToLocalChecked()); + } + {% else %} + Local tmpArray = Nan::New({{= parsedName =}}); + {% endif %} + + to = tmpArray; +{% elsif cppClassName == 'GitBuf' %} + if ({{= parsedName =}}) { + to = Nan::New({{= parsedName =}}->ptr, {{= parsedName = }}->size).ToLocalChecked(); + } + else { + to = Nan::Null(); + } +{% else %} + {% if copy %} + if ({{= parsedName =}} != NULL) { + {{= parsedName =}} = ({{ cType|replace '**' '*' }} {% if not cType|isPointer %}*{% endif %}){{ copy }}({{= parsedName =}}); + } + {% endif %} + + if ({{= parsedName =}} != NULL) { + // {{= cppClassName }} {{= parsedName }} + {% if cppClassName == 'Wrapper' %} + to = {{ cppClassName }}::New({{= parsedName =}}); + {% else %} + to = {{ cppClassName }}::New({{= parsedName =}}, {{ selfFreeing|toBool }} {% if ownedByThis %}, info.This(){% endif %}); + {% endif %} + } + else { + to = Nan::Null(); + } + +{% endif %} +// end convert_to_v8 block diff --git a/generate/templates/partials/doc.cc b/generate/templates/partials/doc.cc new file mode 100644 index 000000000..53094312b --- /dev/null +++ b/generate/templates/partials/doc.cc @@ -0,0 +1,15 @@ +/* +{% each args as arg %} + {% if not arg.isReturn %} + {% if not arg.isSelf %} + * @param {{ arg.jsClassName }} {{ arg.name }} + {% endif %} + {% endif %} +{% endeach %}{% each .|returnsInfo as returnInfo %} + {% if isAsync %} + * @param {{ returnInfo.jsOrCppClassName }} callback + {% else %} + * @return {{ returnInfo.jsOrCppClassName }} {% if returnInfo.name %}{{ returnInfo.name }}{% else %}result{% endif %} + {% endif %} +{% endeach %} + */ diff --git a/generate/templates/partials/field_accessors.cc b/generate/templates/partials/field_accessors.cc new file mode 100644 index 000000000..0098d06b4 --- /dev/null +++ 
b/generate/templates/partials/field_accessors.cc @@ -0,0 +1,260 @@ +{% each fields|fieldsInfo as field %} + {% if not field.ignore %} + NAN_GETTER({{ cppClassName }}::Get{{ field.cppFunctionName }}) { + + {{ cppClassName }} *wrapper = Nan::ObjectWrap::Unwrap<{{ cppClassName }}>(info.This()); + + {% if field.isEnum %} + info.GetReturnValue().Set(Nan::New((int)wrapper->GetValue()->{{ field.name }})); + + {% elsif field.isLibgitType | or field.payloadFor %} + info.GetReturnValue().Set(Nan::New(wrapper->{{ field.name }})); + + {% elsif field.isCallbackFunction %} + if (wrapper->{{field.name}}.HasCallback()) { + info.GetReturnValue().Set(wrapper->{{ field.name }}.GetCallback()->GetFunction()); + } else { + info.GetReturnValue().SetUndefined(); + } + + {% elsif field.cppClassName == 'String' %} + if (wrapper->GetValue()->{{ field.name }}) { + info.GetReturnValue().Set(Nan::New(wrapper->GetValue()->{{ field.name }}).ToLocalChecked()); + } + else { + return; + } + + {% elsif field.cppClassName|isV8Value %} + info.GetReturnValue().Set(Nan::New<{{ field.cppClassName }}>(wrapper->GetValue()->{{ field.name }})); + {% endif %} + } + + NAN_SETTER({{ cppClassName }}::Set{{ field.cppFunctionName }}) { + {{ cppClassName }} *wrapper = Nan::ObjectWrap::Unwrap<{{ cppClassName }}>(info.This()); + + {% if field.isEnum %} + if (value->IsNumber()) { + wrapper->GetValue()->{{ field.name }} = ({{ field.cType }}) Nan::To(value).FromJust(); + } + + {% elsif field.isLibgitType %} + Local {{ field.name }}(value->ToObject()); + + wrapper->{{ field.name }}.Reset({{ field.name }}); + + wrapper->raw->{{ field.name }} = {% if not field.cType | isPointer %}*{% endif %}{% if field.cppClassName == 'GitStrarray' %}StrArrayConverter::Convert({{ field.name }}->ToObject()){% else %}Nan::ObjectWrap::Unwrap<{{ field.cppClassName }}>({{ field.name }}->ToObject())->GetValue(){% endif %}; + + {% elsif field.isCallbackFunction %} + Nan::Callback *callback = NULL; + int throttle = {%if field.return.throttle %}{{ 
field.return.throttle }}{%else%}0{%endif%}; + + if (value->IsFunction()) { + callback = new Nan::Callback(value.As()); + } else if (value->IsObject()) { + Local object = value.As(); + Local callbackKey; + Nan::MaybeLocal maybeObjectCallback = Nan::Get(object, Nan::New("callback").ToLocalChecked()); + if (!maybeObjectCallback.IsEmpty()) { + Local objectCallback = maybeObjectCallback.ToLocalChecked(); + if (objectCallback->IsFunction()) { + callback = new Nan::Callback(objectCallback.As()); + Nan::MaybeLocal maybeObjectThrottle = Nan::Get(object, Nan::New("throttle").ToLocalChecked()); + if(!maybeObjectThrottle.IsEmpty()) { + Local objectThrottle = maybeObjectThrottle.ToLocalChecked(); + if (objectThrottle->IsNumber()) { + throttle = (int)objectThrottle.As()->Value(); + } + } + } + } + } + if (callback) { + if (!wrapper->raw->{{ field.name }}) { + wrapper->raw->{{ field.name }} = ({{ field.cType }}){{ field.name }}_cppCallback; + } + + wrapper->{{ field.name }}.SetCallback(callback, throttle); + } + + {% elsif field.payloadFor %} + wrapper->{{ field.name }}.Reset(value); + + {% elsif field.cppClassName == 'String' %} + if (wrapper->GetValue()->{{ field.name }}) { + } + + String::Utf8Value str(value); + wrapper->GetValue()->{{ field.name }} = strdup(*str); + + {% elsif field.isCppClassIntType %} + if (value->IsNumber()) { + wrapper->GetValue()->{{ field.name }} = value->{{field.cppClassName}}Value(); + } + + {% else %} + if (value->IsNumber()) { + wrapper->GetValue()->{{ field.name }} = ({{ field.cType }}) Nan::To(value).FromJust(); + } + {% endif %} + } + + {% if field.isCallbackFunction %} + {{ cppClassName }}* {{ cppClassName }}::{{ field.name }}_getInstanceFromBaton({{ field.name|titleCase }}Baton* baton) { + return static_cast<{{ cppClassName }}*>(baton->{% each field.args|argsInfo as arg %} + {% if arg.payload == true %}{{arg.name}}{% elsif arg.lastArg %}{{arg.name}}{% endif %} + {% endeach %}); + } + + {{ field.return.type }} {{ cppClassName }}::{{ field.name 
}}_cppCallback ( + {% each field.args|argsInfo as arg %} + {{ arg.cType }} {{ arg.name}}{% if not arg.lastArg %},{% endif %} + {% endeach %} + ) { + {{ field.name|titleCase }}Baton* baton = + new {{ field.name|titleCase }}Baton({{ field.return.noResults }}); + + {% each field.args|argsInfo as arg %} + baton->{{ arg.name }} = {{ arg.name }}; + {% endeach %} + + {{ cppClassName }}* instance = {{ field.name }}_getInstanceFromBaton(baton); + + if (instance->{{ field.name }}.WillBeThrottled()) { + return baton->defaultResult; + } + + return baton->ExecuteAsync((uv_async_cb) {{ field.name }}_async); + } + + void {{ cppClassName }}::{{ field.name }}_async(uv_async_t* req, int status) { + Nan::HandleScope scope; + + {{ field.name|titleCase }}Baton* baton = static_cast<{{ field.name|titleCase }}Baton*>(req->data); + {{ cppClassName }}* instance = {{ field.name }}_getInstanceFromBaton(baton); + + if (instance->{{ field.name }}.GetCallback()->IsEmpty()) { + {% if field.return.type == "int" %} + baton->result = baton->defaultResult; // no results acquired + {% endif %} + + baton->done = true; + return; + } + + {% each field.args|argsInfo as arg %} + {% if arg.name == "payload" %} + {%-- Do nothing --%} + {% elsif arg.isJsArg %} + {% if arg.cType == "const char *" %} + if (baton->{{ arg.name }} == NULL) { + baton->{{ arg.name }} = ""; + } + {% endif %} + {% endif %} + {% endeach %} + + Local argv[{{ field.args|jsArgsCount }}] = { + {% each field.args|argsInfo as arg %} + {% if arg.name == "payload" %} + {%-- payload is always the last arg --%} + Nan::New(instance->{{ fields|payloadFor field.name }}) + {% elsif arg.isJsArg %} + {% if arg.isEnum %} + Nan::New((int)baton->{{ arg.name }}), + {% elsif arg.isLibgitType %} + {{ arg.cppClassName }}::New(baton->{{ arg.name }}, false), + {% elsif arg.cType == "size_t" %} + // HACK: NAN should really have an overload for Nan::New to support size_t + Nan::New((unsigned int)baton->{{ arg.name }}), + {% elsif arg.cppClassName == 'String' %} 
+ Nan::New(baton->{{ arg.name }}).ToLocalChecked(), + {% else %} + Nan::New(baton->{{ arg.name }}), + {% endif %} + {% endif %} + {% endeach %} + }; + + Nan::TryCatch tryCatch; + Local result = instance->{{ field.name }}.GetCallback()->Call({{ field.args|jsArgsCount }}, argv); + + uv_close((uv_handle_t*) &baton->req, NULL); + + if(PromiseCompletion::ForwardIfPromise(result, baton, {{ cppClassName }}::{{ field.name }}_promiseCompleted)) { + return; + } + + {% each field|returnsInfo false true as _return %} + if (result.IsEmpty() || result->IsNativeError()) { + baton->result = {{ field.return.error }}; + } + else if (!result->IsNull() && !result->IsUndefined()) { + {% if _return.isOutParam %} + {{ _return.cppClassName }}* wrapper = Nan::ObjectWrap::Unwrap<{{ _return.cppClassName }}>(result->ToObject()); + wrapper->selfFreeing = false; + + *baton->{{ _return.name }} = wrapper->GetValue(); + baton->result = {{ field.return.success }}; + {% else %} + if (result->IsNumber()) { + baton->result = (int)result->ToNumber()->Value(); + } + else { + baton->result = baton->defaultResult; + } + {% endif %} + } + else { + baton->result = baton->defaultResult; + } + {% endeach %} + baton->done = true; + } + + void {{ cppClassName }}::{{ field.name }}_promiseCompleted(bool isFulfilled, AsyncBaton *_baton, v8::Local result) { + Nan::HandleScope scope; + + {{ field.name|titleCase }}Baton* baton = static_cast<{{ field.name|titleCase }}Baton*>(_baton); + + if (isFulfilled) { + {% each field|returnsInfo false true as _return %} + if (result.IsEmpty() || result->IsNativeError()) { + baton->result = {{ field.return.error }}; + } + else if (!result->IsNull() && !result->IsUndefined()) { + {% if _return.isOutParam %} + {{ _return.cppClassName }}* wrapper = Nan::ObjectWrap::Unwrap<{{ _return.cppClassName }}>(result->ToObject()); + wrapper->selfFreeing = false; + + *baton->{{ _return.name }} = wrapper->GetValue(); + baton->result = {{ field.return.success }}; + {% else %} + if 
(result->IsNumber()) { + baton->result = (int)result->ToNumber()->Value(); + } + else{ + baton->result = baton->defaultResult; + } + {% endif %} + } + else { + baton->result = baton->defaultResult; + } + {% endeach %} + } + else { + // promise was rejected + {{ cppClassName }}* instance = static_cast<{{ cppClassName }}*>(baton->{% each field.args|argsInfo as arg %} + {% if arg.payload == true %}{{arg.name}}{% elsif arg.lastArg %}{{arg.name}}{% endif %} + {% endeach %}); + Local parent = instance->handle(); + SetPrivate(parent, Nan::New("NodeGitPromiseError").ToLocalChecked(), result); + + baton->result = {{ field.return.error }}; + } + baton->done = true; + } + {% endif %} + {% endif %} +{% endeach %} diff --git a/generate/templates/partials/fields.cc b/generate/templates/partials/fields.cc new file mode 100644 index 000000000..ae3aeae44 --- /dev/null +++ b/generate/templates/partials/fields.cc @@ -0,0 +1,28 @@ +{% each fields|fieldsInfo as field %} + {% if not field.ignore %} + NAN_METHOD({{ cppClassName }}::{{ field.cppFunctionName }}) { + Local to; + + {% if field | isFixedLengthString %} + char* {{ field.name }} = (char *)Nan::ObjectWrap::Unwrap<{{ cppClassName }}>(info.This())->GetValue()->{{ field.name }}; + {% else %} + {{ field.cType }} + {% if not field.cppClassName|isV8Value %} + {% if not field.cType|isPointer %} + * + {% endif %} + {% endif %} + {{ field.name }} = + {% if not field.cppClassName|isV8Value %} + {% if not field.cType|isPointer %} + & + {% endif %} + {% endif %} + Nan::ObjectWrap::Unwrap<{{ cppClassName }}>(info.This())->GetValue()->{{ field.name }}; + {% endif %} + + {% partial convertToV8 field %} + info.GetReturnValue().Set(to); + } + {% endif %} +{% endeach %} diff --git a/generate/templates/partials/guard_arguments.cc b/generate/templates/partials/guard_arguments.cc new file mode 100644 index 000000000..bcfc33284 --- /dev/null +++ b/generate/templates/partials/guard_arguments.cc @@ -0,0 +1,28 @@ + +{%each args|argsInfo as arg%} + {%if 
arg.isJsArg%} + {%if not arg.isOptional%} + {%if not arg.payloadFor %} + {%if arg | isOid %} + if (info.Length() == {{arg.jsArg}} + || (!info[{{arg.jsArg}}]->IsObject() && !info[{{arg.jsArg}}]->IsString())) { + return Nan::ThrowError("{{arg.jsClassName}} {{arg.name}} is required."); + } + {%elsif arg.isCallbackFunction %} + if (info.Length() == {{arg.jsArg}} || !info[{{arg.jsArg}}]->IsFunction()) { + return Nan::ThrowError("{{arg.jsClassName}} {{arg.name}} is required."); + } + {%elsif arg.cppClassName == "GitStrarray" %} + if (info.Length() == {{arg.jsArg}} || !(Nan::To(info[{{arg.jsArg}}]).FromJust())) { + return Nan::ThrowError("Array, String Object, or string {{arg.name}} is required."); + } + {%else%} + if (info.Length() == {{arg.jsArg}} || !info[{{arg.jsArg}}]->Is{{arg.cppClassName|cppToV8}}()) { + return Nan::ThrowError("{{arg.jsClassName}} {{arg.name}} is required."); + } + + {%endif%} + {%endif%} + {%endif%} + {%endif%} +{%endeach%} diff --git a/generate/templates/partials/sync_function.cc b/generate/templates/partials/sync_function.cc new file mode 100644 index 000000000..d53a5c2d0 --- /dev/null +++ b/generate/templates/partials/sync_function.cc @@ -0,0 +1,128 @@ + +{%partial doc .%} +NAN_METHOD({{ cppClassName }}::{{ cppFunctionName }}) { + Nan::EscapableHandleScope scope; + {%partial guardArguments .%} + + {%each .|returnsInfo 'true' as _return %} + {%if _return.shouldAlloc %} + {{ _return.cType }}{{ _return.name }} = ({{ _return.cType }})malloc(sizeof({{ _return.cType|unPointer }})); + {%else%} + {{ _return.cType|unPointer }} {{ _return.name }} = {{ _return.cType|unPointer|defaultValue }}; + {%endif%} + {%endeach%} + + {%each args|argsInfo as arg %} + {%if not arg.isSelf %} + {%if not arg.isReturn %} + {%partial convertFromV8 arg %} + {%if arg.saveArg %} + Local {{ arg.name }}(info[{{ arg.jsArg }}]->ToObject()); + {{ cppClassName }} *thisObj = Nan::ObjectWrap::Unwrap<{{ cppClassName }}>(info.This()); + + thisObj->{{ cppFunctionName }}_{{ arg.name 
}}.Reset({{ arg.name }}); + {%endif%} + {%endif%} + {%endif%} + {%endeach%} + +{%each args|argsInfo as arg %} +{%endeach%} + +{%-- Inside a free call, if the value is already free'd don't do it again.--%} +{% if cppFunctionName == "Free" %} +if (Nan::ObjectWrap::Unwrap<{{ cppClassName }}>(info.This())->GetValue() != NULL) { +{% endif %} + + giterr_clear(); + + { + LockMaster lockMaster(/*asyncAction: */false{%each args|argsInfo as arg %} + {%if arg.cType|isPointer%}{%if not arg.isReturn%} + ,{%if arg.isSelf %} + Nan::ObjectWrap::Unwrap<{{ arg.cppClassName }}>(info.This())->GetValue() + {%else%} + from_{{ arg.name }} + {%endif%} + {%endif%}{%endif%} + {%endeach%}); + + {%if .|hasReturnValue %} + {{ return.cType }} result = {%endif%}{{ cFunctionName }}( + {%each args|argsInfo as arg %} + {%if arg.isReturn %} + {%if not arg.shouldAlloc %}&{%endif%} + {%endif%} + {%if arg.isSelf %} + Nan::ObjectWrap::Unwrap<{{ arg.cppClassName }}>(info.This())->GetValue() + {%elsif arg.isReturn %} + {{ arg.name }} + {%else%} + from_{{ arg.name }} + {%endif%} + {%if not arg.lastArg %},{%endif%} + {%endeach%} + ); + + {%if .|hasReturnValue |and return.isErrorCode %} + if (result != GIT_OK) { + {%each args|argsInfo as arg %} + {%if arg.shouldAlloc %} + free({{ arg.name }}); + {%elsif arg | isOid %} + if (info[{{ arg.jsArg }}]->IsString()) { + free({{ arg.name }}); + } + {%endif%} + {%endeach%} + + if (giterr_last()) { + return Nan::ThrowError(giterr_last()->message); + } else { + return Nan::ThrowError("Unknown Error"); + } + } + {%endif%} + + {% if cppFunctionName == "Free" %} + Nan::ObjectWrap::Unwrap<{{ cppClassName }}>(info.This())->ClearValue(); + } + {% endif %} + + + {%each args|argsInfo as arg %} + {%if arg | isOid %} + if (info[{{ arg.jsArg }}]->IsString()) { + free((void *)from_{{ arg.name }}); + } + {%endif%} + {%endeach%} + + {%if not .|returnsCount %} + return info.GetReturnValue().Set(scope.Escape(Nan::Undefined())); + {%else%} + {%if return.cType | isPointer %} + // null 
checks on pointers + if (!result) { + return info.GetReturnValue().Set(scope.Escape(Nan::Undefined())); + } + {%endif%} + + Local to; + {%if .|returnsCount > 1 %} + Local toReturn = Nan::New(); + {%endif%} + {%each .|returnsInfo as _return %} + {%partial convertToV8 _return %} + {%if .|returnsCount > 1 %} + Nan::Set(toReturn, Nan::New("{{ _return.returnNameOrName }}").ToLocalChecked(), to); + {%endif%} + {%endeach%} + {%if .|returnsCount == 1 %} + return info.GetReturnValue().Set(scope.Escape(to)); + {%else%} + return info.GetReturnValue().Set(scope.Escape(toReturn)); + {%endif%} + {%endif%} + } +} diff --git a/generate/templates/partials/traits.h b/generate/templates/partials/traits.h new file mode 100644 index 000000000..efcc4b49a --- /dev/null +++ b/generate/templates/partials/traits.h @@ -0,0 +1,28 @@ +class {{ cppClassName }}; + +struct {{ cppClassName }}Traits { + typedef {{ cppClassName }} cppClass; + typedef {{ cType }} cType; + + static const bool isDuplicable = {{ dupFunction|toBool |or cpyFunction|toBool}}; + static void duplicate({{ cType }} **dest, {{ cType }} *src) { + {% if dupFunction %} + {{ dupFunction }}(dest, src); + {% elsif cpyFunction %} + {{ cType }} *copy = ({{ cType }} *)malloc(sizeof({{ cType }})); + {{ cpyFunction }}(copy, src); + *dest = copy; + {% else %} + Nan::ThrowError("duplicate called on {{ cppClassName }} which cannot be duplicated"); + {% endif %} + } + + static const bool isFreeable = {{ freeFunctionName | toBool}}; + static void free({{ cType }} *raw) { + {% if freeFunctionName %} + ::{{ freeFunctionName }}(raw); // :: to avoid calling this free recursively + {% else %} + Nan::ThrowError("free called on {{ cppClassName }} which cannot be freed"); + {% endif %} + } +}; diff --git a/generate/templates/templates/binding.gyp b/generate/templates/templates/binding.gyp new file mode 100644 index 000000000..457221dcd --- /dev/null +++ b/generate/templates/templates/binding.gyp @@ -0,0 +1,107 @@ +{ + "targets": [ + { + "target_name": 
"nodegit", + + "dependencies": [ + "vendor/libgit2.gyp:libgit2" + ], + + "variables": { + "coverage%": 0 + }, + + "sources": [ + "src/lock_master.cc", + "src/nodegit.cc", + "src/init_ssh2.cc", + "src/promise_completion.cc", + "src/wrapper.cc", + "src/functions/copy.cc", + "src/functions/sleep_for_ms.cc", + "src/convenient_patch.cc", + "src/convenient_hunk.cc", + "src/str_array_converter.cc", + "src/thread_pool.cc", + {% each %} + {% if type != "enum" %} + "src/{{ name }}.cc", + {% endif %} + {% endeach %} + ], + + "include_dirs": [ + "vendor/libv8-convert", + "vendor/libssh2/include", + "vendor/openssl/openssl/include", + " +#include + +extern "C" { + #include + {% each cDependencies as dependency %} + #include <{{ dependency }}> + {% endeach %} +} + +#include "../include/nodegit.h" +#include "../include/lock_master.h" +#include "../include/functions/copy.h" +#include "../include/{{ filename }}.h" +#include "nodegit_wrapper.cc" +#include "../include/async_libgit2_queue_worker.h" + +{% each dependencies as dependency %} + #include "{{ dependency }}" +{% endeach %} + +#include + +using namespace std; +using namespace v8; +using namespace node; + +{% if cType %} + {{ cppClassName }}::~{{ cppClassName }}() { + // this will cause an error if you have a non-self-freeing object that also needs + // to save values. Since the object that will eventually free the object has no + // way of knowing to free these values. 
+ {% each functions as function %} + {% if not function.ignore %} + {% each function.args as arg %} + {% if arg.saveArg %} + + {{ function.cppFunctionName }}_{{ arg.name }}).Reset(); + + {% endif %} + {% endeach %} + {% endif %} + {% endeach %} + } + + void {{ cppClassName }}::InitializeComponent(Local target) { + Nan::HandleScope scope; + + Local tpl = Nan::New(JSNewFunction); + + tpl->InstanceTemplate()->SetInternalFieldCount(1); + tpl->SetClassName(Nan::New("{{ jsClassName }}").ToLocalChecked()); + + {% each functions as function %} + {% if not function.ignore %} + {% if function.isPrototypeMethod %} + Nan::SetPrototypeMethod(tpl, "{{ function.jsFunctionName }}", {{ function.cppFunctionName }}); + {% else %} + Nan::SetMethod(tpl, "{{ function.jsFunctionName }}", {{ function.cppFunctionName }}); + {% endif %} + {% endif %} + {% endeach %} + + {% each fields as field %} + {% if not field.ignore %} + Nan::SetPrototypeMethod(tpl, "{{ field.jsFunctionName }}", {{ field.cppFunctionName }}); + {% endif %} + {% endeach %} + + InitializeTemplate(tpl); + + Local _constructor_template = Nan::GetFunction(tpl).ToLocalChecked(); + constructor_template.Reset(_constructor_template); + Nan::Set(target, Nan::New("{{ jsClassName }}").ToLocalChecked(), _constructor_template); + } + +{% else %} + + void {{ cppClassName }}::InitializeComponent(Local target) { + Nan::HandleScope scope; + + Local object = Nan::New(); + + {% each functions as function %} + {% if not function.ignore %} + Nan::SetMethod(object, "{{ function.jsFunctionName }}", {{ function.cppFunctionName }}); + {% endif %} + {% endeach %} + + Nan::Set(target, Nan::New("{{ jsClassName }}").ToLocalChecked(), object); + } + +{% endif %} + +{% each functions as function %} + {% if not function.ignore %} + {% if function.isManual %} + {{= function.implementation =}} + {% elsif function.isAsync %} + {% partial asyncFunction function %} + {% else %} + {% partial syncFunction function %} + {% endif %} + {% endif %} +{% endeach %} 
+ +{% partial fields . %} + +{%if cType %} +// force base class template instantiation, to make sure we get all the +// methods, statics, etc. +template class NodeGitWrapper<{{ cppClassName }}Traits>; +{% endif %} diff --git a/generate/templates/templates/class_header.h b/generate/templates/templates/class_header.h new file mode 100644 index 000000000..aff51243a --- /dev/null +++ b/generate/templates/templates/class_header.h @@ -0,0 +1,192 @@ +#ifndef {{ cppClassName|upper }}_H +#define {{ cppClassName|upper }}_H +#include +#include +#include +#include + +#include "async_baton.h" +#include "nodegit_wrapper.h" +#include "promise_completion.h" + +extern "C" { +#include +{%each cDependencies as dependency %} +#include <{{ dependency }}> +{%endeach%} +} + +#include "../include/typedefs.h" + +{%each dependencies as dependency%} +#include "{{ dependency }}" +{%endeach%} + +{%if needsForwardDeclaration %} +// Forward declaration. +struct {{ cType }} { + {%each fields as field%} + {%if not field.ignore%} + {{ field.structType|or field.cType }} {{ field.structName|or field.name }}; + {%endif%} + {%endeach%} +}; +{%endif%} + +using namespace node; +using namespace v8; + +{%if cType %} +{%partial traits .%} +{%endif%} + +class {{ cppClassName }} : public +{%if cType %} + NodeGitWrapper<{{ cppClassName }}Traits> +{%else%} + Nan::ObjectWrap +{%endif%} +{ + {%if cType %} + // grant full access to base class + friend class NodeGitWrapper<{{ cppClassName }}Traits>; + {%endif %} + public: + static void InitializeComponent (Local target); + + {% each functions as function %} + {% if not function.ignore %} + {%each function.args as arg %} + {%if arg.isCallbackFunction %} + static {{ arg.return.type }} {{ function.cppFunctionName }}_{{ arg.name }}_cppCallback ( + {% each arg.args|argsInfo as cbArg %} + {{ cbArg.cType }} {{ cbArg.name }} + {% if not cbArg.lastArg %} + , + {% endif %} + {% endeach %} + ); + + static void {{ function.cppFunctionName }}_{{ arg.name }}_async(uv_async_t* 
req, int status); + static void {{ function.cppFunctionName }}_{{ arg.name }}_promiseCompleted(bool isFulfilled, AsyncBaton *_baton, v8::Local result); + struct {{ function.cppFunctionName }}_{{ arg.name|titleCase }}Baton : public AsyncBatonWithResult<{{ arg.return.type }}> { + {% each arg.args|argsInfo as cbArg %} + {{ cbArg.cType }} {{ cbArg.name }}; + {% endeach %} + + {{ function.cppFunctionName }}_{{ arg.name|titleCase }}Baton(const {{ arg.return.type }} &defaultResult) + : AsyncBatonWithResult<{{ arg.return.type }}>(defaultResult) { + } + }; + {% endif %} + {% endeach %} + {% endif %} + {% endeach %} + + + private: + {%if cType%} + {{ cppClassName }}() + : NodeGitWrapper<{{ cppClassName }}Traits>( + {% if createFunctionName %} + "A new {{ cppClassName }} cannot be instantiated. Use {{ jsCreateFunctionName }} instead." + {% else %} + "A new {{ cppClassName }} cannot be instantiated." + {% endif %} + ) + {} + {{ cppClassName }}({{ cType }} *raw, bool selfFreeing, Local owner = Local()) + : NodeGitWrapper<{{ cppClassName }}Traits>(raw, selfFreeing, owner) + {} + ~{{ cppClassName }}(); + {%endif%} + + {% each functions as function %} + {% if not function.ignore %} + {% each function.args as arg %} + {% if arg.saveArg %} + Nan::Persistent {{ function.cppFunctionName }}_{{ arg.name }}; + {% endif %} + {% endeach %} + {% endif %} + {% endeach %} + + {%each fields as field%} + {%if not field.ignore%} + static NAN_METHOD({{ field.cppFunctionName }}); + {%endif%} + {%endeach%} + + {%each functions as function%} + {%if not function.ignore %} + {%if function.isAsync %} + + struct {{ function.cppFunctionName }}Baton { + int error_code; + const git_error* error; + {%each function.args as arg%} + {%if arg.isReturn%} + {{= arg.cType|replace "**" "*" =}} {{ arg.name }}; + {%else%} + {{= arg.cType =}} {{ arg.name }}; + {%if arg | isOid %} + bool {{ arg.name }}NeedsFree; + {%endif%} + {%endif%} + {%endeach%} + }; + class {{ function.cppFunctionName }}Worker : public 
Nan::AsyncWorker { + public: + {{ function.cppFunctionName }}Worker( + {{ function.cppFunctionName }}Baton *_baton, + Nan::Callback *callback + ) : Nan::AsyncWorker(callback) + , baton(_baton) {}; + ~{{ function.cppFunctionName }}Worker() {}; + void Execute(); + void HandleOKCallback(); + + private: + {{ function.cppFunctionName }}Baton *baton; + }; + {%endif%} + + static NAN_METHOD({{ function.cppFunctionName }}); + {%endif%} + {%endeach%} + + {%each functions as function%} + {%each function.args as arg %} + {% if arg.globalPayload %} + + struct {{ function.cppFunctionName }}_globalPayload { + {%each function.args as arg %} + {%if arg.isCallbackFunction %} + Nan::Callback * {{ arg.name }}; + {%endif%} + {%endeach%} + + {{ function.cppFunctionName }}_globalPayload() { + {%each function.args as arg %} + {%if arg.isCallbackFunction %} + {{ arg.name }} = NULL; + {%endif%} + {%endeach%} + } + + ~{{ function.cppFunctionName }}_globalPayload() { + {%each function.args as arg %} + {%if arg.isCallbackFunction %} + if ({{ arg.name }} != NULL) { + delete {{ arg.name }}; + } + {%endif%} + {%endeach%} + } + }; + {%endif%} + {%endeach%} + {%endeach%} +}; + +#endif diff --git a/generate/templates/templates/enums.js b/generate/templates/templates/enums.js new file mode 100644 index 000000000..cfb95ebd2 --- /dev/null +++ b/generate/templates/templates/enums.js @@ -0,0 +1,14 @@ +var NodeGit = require("../"); +NodeGit.Enums = {}; + +{% each . 
as enumerable %} + {% if not enumerable.ignore %} + {% if enumerable.type == "enum" %} + NodeGit.{{ enumerable.owner }}.{{ enumerable.JsName }} = { + {% each enumerable.values as value %} + {{ value.JsName }}: {{ value.value }}, + {% endeach %} + }; + {% endif %} + {% endif %} +{% endeach %} diff --git a/generate/templates/templates/nodegit.cc b/generate/templates/templates/nodegit.cc new file mode 100644 index 000000000..23adae3b5 --- /dev/null +++ b/generate/templates/templates/nodegit.cc @@ -0,0 +1,159 @@ +#include +#include + +#include +#include +#include +#include + +#include + +#include "../include/init_ssh2.h" +#include "../include/lock_master.h" +#include "../include/nodegit.h" +#include "../include/wrapper.h" +#include "../include/promise_completion.h" +#include "../include/functions/copy.h" +{% each %} + {% if type != "enum" %} + #include "../include/{{ filename }}.h" + {% endif %} +{% endeach %} +#include "../include/convenient_patch.h" +#include "../include/convenient_hunk.h" + +#if (NODE_MODULE_VERSION > 48) + v8::Local GetPrivate(v8::Local object, + v8::Local key) { + v8::Isolate* isolate = v8::Isolate::GetCurrent(); + v8::Local context = isolate->GetCurrentContext(); + v8::Local privateKey = v8::Private::ForApi(isolate, key); + v8::Local value; + v8::Maybe result = object->HasPrivate(context, privateKey); + if (!(result.IsJust() && result.FromJust())) + return v8::Local(); + if (object->GetPrivate(context, privateKey).ToLocal(&value)) + return value; + return v8::Local(); + } + + void SetPrivate(v8::Local object, + v8::Local key, + v8::Local value) { + if (value.IsEmpty()) + return; + v8::Isolate* isolate = v8::Isolate::GetCurrent(); + v8::Local context = isolate->GetCurrentContext(); + v8::Local privateKey = v8::Private::ForApi(isolate, key); + object->SetPrivate(context, privateKey, value); + } +#else + v8::Local GetPrivate(v8::Local object, + v8::Local key) { + return object->GetHiddenValue(key); + } + + void SetPrivate(v8::Local object, + 
v8::Local key, + v8::Local value) { + object->SetHiddenValue(key, value); + } +#endif + +void LockMasterEnable(const FunctionCallbackInfo& info) { + LockMaster::Enable(); +} + +void LockMasterSetStatus(const FunctionCallbackInfo& info) { + Nan::HandleScope scope; + + // convert the first argument to Status + if(info.Length() >= 0 && info[0]->IsNumber()) { + v8::Local value = info[0]->ToInt32(); + LockMaster::Status status = static_cast(value->Value()); + if(status >= LockMaster::Disabled && status <= LockMaster::Enabled) { + LockMaster::SetStatus(status); + return; + } + } + + // argument error + Nan::ThrowError("Argument must be one 0, 1 or 2"); +} + +void LockMasterGetStatus(const FunctionCallbackInfo& info) { + info.GetReturnValue().Set(Nan::New(LockMaster::GetStatus())); +} + +void LockMasterGetDiagnostics(const FunctionCallbackInfo& info) { + LockMaster::Diagnostics diagnostics(LockMaster::GetDiagnostics()); + + // return a plain JS object with properties + v8::Local result = Nan::New(); + result->Set(Nan::New("storedMutexesCount").ToLocalChecked(), Nan::New(diagnostics.storedMutexesCount)); + info.GetReturnValue().Set(result); +} + +static uv_mutex_t *opensslMutexes; + +void OpenSSL_LockingCallback(int mode, int type, const char *, int) { + if (mode & CRYPTO_LOCK) { + uv_mutex_lock(&opensslMutexes[type]); + } else { + uv_mutex_unlock(&opensslMutexes[type]); + } +} + +unsigned long OpenSSL_IDCallback() { + return (unsigned long)uv_thread_self(); +} + +void OpenSSL_ThreadSetup() { + opensslMutexes=(uv_mutex_t *)malloc(CRYPTO_num_locks() * sizeof(uv_mutex_t)); + + for (int i=0; i target) { + // Initialize thread safety in openssl and libssh2 + OpenSSL_ThreadSetup(); + init_ssh2(); + // Initialize libgit2. 
+ git_libgit2_init(); + + Nan::HandleScope scope; + + Wrapper::InitializeComponent(target); + PromiseCompletion::InitializeComponent(); + {% each %} + {% if type != "enum" %} + {{ cppClassName }}::InitializeComponent(target); + {% endif %} + {% endeach %} + + ConvenientHunk::InitializeComponent(target); + ConvenientPatch::InitializeComponent(target); + + NODE_SET_METHOD(target, "enableThreadSafety", LockMasterEnable); + NODE_SET_METHOD(target, "setThreadSafetyStatus", LockMasterSetStatus); + NODE_SET_METHOD(target, "getThreadSafetyStatus", LockMasterGetStatus); + NODE_SET_METHOD(target, "getThreadSafetyDiagnostics", LockMasterGetDiagnostics); + + Local threadSafety = Nan::New(); + threadSafety->Set(Nan::New("DISABLED").ToLocalChecked(), Nan::New((int)LockMaster::Disabled)); + threadSafety->Set(Nan::New("ENABLED_FOR_ASYNC_ONLY").ToLocalChecked(), Nan::New((int)LockMaster::EnabledForAsyncOnly)); + threadSafety->Set(Nan::New("ENABLED").ToLocalChecked(), Nan::New((int)LockMaster::Enabled)); + + target->Set(Nan::New("THREAD_SAFETY").ToLocalChecked(), threadSafety); + + LockMaster::Initialize(); +} + +NODE_MODULE(nodegit, init) diff --git a/generate/templates/templates/nodegit.js b/generate/templates/templates/nodegit.js new file mode 100644 index 000000000..da27db6cd --- /dev/null +++ b/generate/templates/templates/nodegit.js @@ -0,0 +1,124 @@ +var promisify = require("promisify-node"); +var rawApi; + +// Attempt to load the production release first, if it fails fall back to the +// debug release. +try { + rawApi = require("../build/Release/nodegit.node"); +} +catch (ex) { + /* istanbul ignore next */ + if (ex.code !== "MODULE_NOT_FOUND") { + throw ex; + } + + rawApi = require("../build/Debug/nodegit.node"); +} + +// Native methods do not return an identifiable function, so we +// have to override them here +/* jshint ignore:start */ +{% each . 
as idef %} + {% if idef.type != "enum" %} + + {% if idef.functions.length > 0 %} + var _{{ idef.jsClassName }} + = rawApi.{{ idef.jsClassName }}; + {% endif %} + + {% each idef.functions as fn %} + {% if fn.isAsync %} + + {% if fn.isPrototypeMethod %} + + var _{{ idef.jsClassName }}_{{ fn.jsFunctionName}} + = _{{ idef.jsClassName }}.prototype.{{ fn.jsFunctionName }}; + _{{ idef.jsClassName }}.prototype.{{ fn.jsFunctionName }} + = promisify(_{{ idef.jsClassName }}_{{ fn.jsFunctionName}}); + + {% else %} + + var _{{ idef.jsClassName }}_{{ fn.jsFunctionName}} + = _{{ idef.jsClassName }}.{{ fn.jsFunctionName }}; + _{{ idef.jsClassName }}.{{ fn.jsFunctionName }} + = promisify(_{{ idef.jsClassName }}_{{ fn.jsFunctionName}}); + + {% endif %} + + {% endif %} + {% endeach %} + + {% endif %} +{% endeach %} + +var _ConvenientPatch = rawApi.ConvenientPatch; +var _ConvenientPatch_hunks = _ConvenientPatch.prototype.hunks; +_ConvenientPatch.prototype.hunks = promisify(_ConvenientPatch_hunks); + +var _ConvenientHunk = rawApi.ConvenientHunk; +var _ConvenientHunk_lines = _ConvenientHunk.prototype.lines; +_ConvenientHunk.prototype.lines = promisify(_ConvenientHunk_lines); +/* jshint ignore:end */ + +// Set the exports prototype to the raw API. +exports.__proto__ = rawApi; + +var importExtension = function(name) { + try { + require("./" + name); + } + catch (unhandledException) { + if (unhandledException.code != "MODULE_NOT_FOUND") { + throw unhandledException; + } + } +}; + +// Load up utils +rawApi.Utils = {}; +require("./utils/lookup_wrapper"); +require("./utils/normalize_options"); +require("./utils/shallow_clone"); + +// Load up extra types; +require("./status_file"); +require("./enums.js"); + +// Import extensions +{% each %} + {% if type != "enum" %} + importExtension("{{ filename }}"); + {% endif %} +{% endeach %} +/* jshint ignore:start */ +{% each . 
as idef %} + {% if idef.type != "enum" %} + {% each idef.functions as fn %} + {% if fn.useAsOnRootProto %} + + // Inherit directly from the original {{idef.jsClassName}} object. + _{{ idef.jsClassName }}.{{ fn.jsFunctionName }}.__proto__ = + _{{ idef.jsClassName }}; + + // Ensure we're using the correct prototype. + _{{ idef.jsClassName }}.{{ fn.jsFunctionName }}.prototype = + _{{ idef.jsClassName }}.prototype; + + // Assign the function as the root + rawApi.{{ idef.jsClassName }} = + _{{ idef.jsClassName }}.{{ fn.jsFunctionName }}; + + {% endif %} + {% endeach %} + {% endif %} +{% endeach %} +/* jshint ignore:end */ + +// Wrap asynchronous methods to return promises. +promisify(exports); + +// Set version. +exports.version = require("../package").version; + +// Expose Promise implementation. +exports.Promise = Promise; diff --git a/generate/templates/templates/struct_content.cc b/generate/templates/templates/struct_content.cc new file mode 100644 index 000000000..14b3db242 --- /dev/null +++ b/generate/templates/templates/struct_content.cc @@ -0,0 +1,120 @@ +#include +#include +#ifdef WIN32 +#include +#else +#include +#endif // win32 + +extern "C" { + #include + {% each cDependencies as dependency %} + #include <{{ dependency }}> + {% endeach %} +} + +#include +#include "../include/nodegit.h" +#include "../include/lock_master.h" +#include "../include/functions/copy.h" +#include "../include/{{ filename }}.h" +#include "nodegit_wrapper.cc" + +{% each dependencies as dependency %} + #include "{{ dependency }}" +{% endeach %} + +using namespace v8; +using namespace node; +using namespace std; + + +// generated from struct_content.cc +{{ cppClassName }}::{{ cppClassName }}() : NodeGitWrapper<{{ cppClassName }}Traits>(NULL, true, v8::Local()) +{ + {% if ignoreInit == true %} + this->raw = new {{ cType }}; + {% else %} + {{ cType }} wrappedValue = {{ cType|upper }}_INIT; + this->raw = ({{ cType }}*) malloc(sizeof({{ cType }})); + memcpy(this->raw, &wrappedValue, sizeof({{ 
cType }})); + {% endif %} + + this->ConstructFields(); +} + +{{ cppClassName }}::{{ cppClassName }}({{ cType }}* raw, bool selfFreeing, v8::Local owner) + : NodeGitWrapper<{{ cppClassName }}Traits>(raw, selfFreeing, owner) +{ + this->ConstructFields(); +} + +{{ cppClassName }}::~{{ cppClassName }}() { + {% each fields|fieldsInfo as field %} + {% if not field.ignore %} + {% if not field.isEnum %} + {% if field.isCallbackFunction %} + if (this->{{ field.name }}.HasCallback()) { + this->raw->{{ fields|payloadFor field.name }} = NULL; + } + {% endif %} + {% endif %} + {% endif %} + {% endeach %} +} + +void {{ cppClassName }}::ConstructFields() { + {% each fields|fieldsInfo as field %} + {% if not field.ignore %} + {% if not field.isEnum %} + {% if field.hasConstructor |or field.isLibgitType %} + Local {{ field.name }}Temp = {{ field.cppClassName }}::New( + {%if not field.cType|isPointer %}&{%endif%}this->raw->{{ field.name }}, + false + )->ToObject(); + this->{{ field.name }}.Reset({{ field.name }}Temp); + + {% elsif field.isCallbackFunction %} + + // Set the static method call and set the payload for this function to be + // the current instance + this->raw->{{ field.name }} = NULL; + this->raw->{{ fields|payloadFor field.name }} = (void *)this; + {% elsif field.payloadFor %} + + Local {{ field.name }} = Nan::Undefined(); + this->{{ field.name }}.Reset({{ field.name }}); + {% endif %} + {% endif %} + {% endif %} + {% endeach %} +} + +void {{ cppClassName }}::InitializeComponent(Local target) { + Nan::HandleScope scope; + + Local tpl = Nan::New(JSNewFunction); + + tpl->InstanceTemplate()->SetInternalFieldCount(1); + tpl->SetClassName(Nan::New("{{ jsClassName }}").ToLocalChecked()); + + {% each fields as field %} + {% if not field.ignore %} + {% if not field | isPayload %} + Nan::SetAccessor(tpl->InstanceTemplate(), Nan::New("{{ field.jsFunctionName }}").ToLocalChecked(), Get{{ field.cppFunctionName}}, Set{{ field.cppFunctionName}}); + {% endif %} + {% endif %} + {% 
endeach %} + + InitializeTemplate(tpl); + + Local _constructor_template = Nan::GetFunction(tpl).ToLocalChecked(); + constructor_template.Reset(_constructor_template); + Nan::Set(target, Nan::New("{{ jsClassName }}").ToLocalChecked(), _constructor_template); +} + +{% partial fieldAccessors . %} + +// force base class template instantiation, to make sure we get all the +// methods, statics, etc. +template class NodeGitWrapper<{{ cppClassName }}Traits>; diff --git a/generate/templates/templates/struct_header.h b/generate/templates/templates/struct_header.h new file mode 100644 index 000000000..122a55c0a --- /dev/null +++ b/generate/templates/templates/struct_header.h @@ -0,0 +1,89 @@ +#ifndef {{ cppClassName|upper }}_H +#define {{ cppClassName|upper }}_H +#include +#include +#include +#include + +#include "async_baton.h" +#include "callback_wrapper.h" +#include "nodegit_wrapper.h" + +extern "C" { + #include + {% each cDependencies as dependency %} + #include <{{ dependency }}> + {% endeach %} +} + +{% each dependencies as dependency %} + #include "{{ dependency }}" +{% endeach %} + +using namespace node; +using namespace v8; + +{%partial traits .%} + +class {{ cppClassName }} : public NodeGitWrapper<{{ cppClassName }}Traits> { + // grant full access to base class + friend class NodeGitWrapper<{{ cppClassName }}Traits>; + public: + {{ cppClassName }}({{ cType }}* raw, bool selfFreeing, v8::Local owner = Local()); + static void InitializeComponent (Local target); + + {% each fields as field %} + {% if not field.ignore %} + {% if field.isCallbackFunction %} + static {{ field.return.type }} {{ field.name }}_cppCallback ( + {% each field.args|argsInfo as arg %} + {{ arg.cType }} {{ arg.name}} + {% if not arg.lastArg %} + , + {% endif %} + {% endeach %} + ); + + static void {{ field.name }}_async(uv_async_t* req, int status); + static void {{ field.name }}_promiseCompleted(bool isFulfilled, AsyncBaton *_baton, v8::Local result); + struct {{ field.name|titleCase }}Baton : 
public AsyncBatonWithResult<{{ field.return.type }}> { + {% each field.args|argsInfo as arg %} + {{ arg.cType }} {{ arg.name}}; + {% endeach %} + + {{ field.name|titleCase }}Baton(const {{ field.return.type }} &defaultResult) + : AsyncBatonWithResult<{{ field.return.type }}>(defaultResult) { + } + }; + static {{ cppClassName }} * {{ field.name }}_getInstanceFromBaton ( + {{ field.name|titleCase }}Baton *baton); + {% endif %} + {% endif %} + {% endeach %} + + private: + {{ cppClassName }}(); + ~{{ cppClassName }}(); + + void ConstructFields(); + + {% each fields as field %} + {% if not field.ignore %} + {% if not field.isEnum %} + {% if field.isLibgitType %} + Nan::Persistent {{ field.name }}; + {% elsif field.isCallbackFunction %} + CallbackWrapper {{ field.name }}; + {% elsif field.payloadFor %} + Nan::Persistent {{ field.name }}; + {% endif %} + {% endif %} + + static NAN_GETTER(Get{{ field.cppFunctionName }}); + static NAN_SETTER(Set{{ field.cppFunctionName }}); + + {% endif %} + {% endeach %} +}; + +#endif diff --git a/guides/README.md b/guides/README.md new file mode 100644 index 000000000..e4a2bdf14 --- /dev/null +++ b/guides/README.md @@ -0,0 +1,34 @@ +--- +layout: guides +menu_item: guides +title: Guides +description: Learning NodeGit +--- + +## Install + +> How to install NodeGit + +- [Basics](install/) +- [From source](install/from-source) +- [Electron](install/electron/) +- [NW.js](install/nw.js/) + +*** + +## Repository + +> How to work with repositories + +- [Opening](repositories/) +- [Initializing](repositories/initializing) + +*** + +## Cloning + +> How to clone repositories + +- [HTTP/HTTPS](cloning/) +- [SSH w/ Agent](cloning/ssh-with-agent/) +- [GitHub Two Factor Auth](cloning/gh-two-factor/) diff --git a/guides/cloning/README.md b/guides/cloning/README.md new file mode 100644 index 000000000..7a44d0cf6 --- /dev/null +++ b/guides/cloning/README.md @@ -0,0 +1,132 @@ +--- +layout: full +menu_item: guides +title: HTTP/HTTPS Guide +description: How 
to clone with HTTP/HTTPS +--- + +**In order to run examples, you will need to [Install NodeGit](../install) +first.** + +[Return to all guides](../) + +* * * + +HTTP/HTTPS +---------- + +This guide explains how to clone a repository, and in the case of failure, +attempt to open the existing path. + +[View example source](index.js) + +### Requiring NodeGit + +In the guides directory, we like to keep our NodeGit relative to the project +root. + +``` javascript +var NodeGit = require("../../../"); +``` + +However, in your project you will most likely be using the following command: + +``` javascript +var NodeGit = require("nodegit"); +``` + +### Clone URL + +The first argument to the `clone` method is a URL. + +In this example we're going to clone one of our test repositories from GitHub. +You could easily substitute this with any valid http or https Git repository +URL. + +``` javascript +var cloneURL = "https://github.com/nodegit/test"; +``` + +### Clone path + +The second argument to the `clone` method is a path. + +Ideally your application will clone a repository into the same folder path +regardless of how or where you execute it from. Paths are relative to the +current working directory in NodeGit, so you will need to normalize it first. + +This is very simple in Node: + +``` javascript +var localPath = require("path").join(__dirname, "tmp"); +``` + +Now this `tmp` directory will be created along side your script, no matter how +or where you execute it from. + +### Clone options + +The third argument to the `clone` method is an optional simple object. + +``` javascript +var cloneOptions = {}; +``` + +**If you are using HTTP the OS X issue below does not affect you.** + +#### GitHub certificate issue in OS X + +Unfortunately in OS X there is a problem where libgit2 is unable to look up +GitHub certificates correctly. In order to bypass this problem, we're going +to passthrough the certificate check. 
+ +*Note: this is not a problem with Windows or Linux* + +``` javascript +cloneOptions.fetchOpts = { + callbacks: { + certificateCheck: function() { return 1; } + } +}; +``` + +### Invoking the clone method + +You can easily invoke our top-level Clone as a function passing along the three +aforementioned arguments. + +``` javascript +var cloneRepository = NodeGit.Clone(cloneURL, localPath, cloneOptions); +``` + +Notice how we store the return value from `Git.Clone`. This is a +[Promise](https://www.promisejs.org/) to represent the asynchronous operation. +It offers finer control flow by allowing us to capture errors and fallback if +there is a clone failure. + +### Handling clone failure + +A naive way to handle a clone failure is to try opening the same path. Clones +will most commonly fail when the directory already exists. We can define +a function to attempt opening in this case. + +``` javascript +var errorAndAttemptOpen = function() { + return NodeGit.Repository.open(local); +}; +``` + +This will be called as part of the Promise resolution in the final step. + +### The Promise chain + +Lastly in our clone operation, we'll assemble a Promise chain to handle errors +and work with the `Git.Repository` instance result. + +``` javascript +cloneRepository.catch(errorAndAttemptOpen) + .then(function(repository) { + // Access any repository methods here. + console.log("Is the repository bare? 
%s", Boolean(repository.isBare())); + }); +``` diff --git a/guides/cloning/gh-two-factor/README.md b/guides/cloning/gh-two-factor/README.md new file mode 100644 index 000000000..1a9190ab3 --- /dev/null +++ b/guides/cloning/gh-two-factor/README.md @@ -0,0 +1,169 @@ +--- +layout: full +menu_item: guides +title: GitHub Two Factor Auth Guide +description: How to clone with GitHub Two Factor Authorization +--- + +**In order to run examples, you will need to [Install NodeGit](../../install) +first.** + +[Return to all guides](../../) + +* * * + +GitHub Two Factor Auth +---------------------- + +This guide explains how to clone a repository, and in the case of failure, +attempt to open the existing path. + +[View example source](index.js) + +### Requiring NodeGit + +In the guides directory, we like to keep our NodeGit relative to the project +root. + +``` javascript +var NodeGit = require("../../../"); +``` + +However, in your project you will most likely be using the following command: + +``` javascript +var NodeGit = require("nodegit"); +``` + +### GitHub Personal OAuth Token + +Before you can clone a repository, you'll need a GitHub OAuth application +token. You can find more information on generating one here: + +[Creating an access token for command-line use]( +https://help.github.com/articles/creating-an-access-token-for-command-line-use/ +) + +Once you have this token you'll assign it to a variable in your project, for +this example, we'll call it `GITHUB_TOKEN`. + +``` javascript +var GITHUB_TOKEN = ""; +``` + +Keep this variable a secret. If you accidentally commit this key to a public +GitHub repository they will immediately revoke it. + +### Clone URL + +The first argument to the `clone` method is a URL. + +In this example we're going to clone one of our private test repositories from +GitHub. This must be an `https` protocol URL for the clone to work. 
+ +``` javascript +var cloneURL = "https://github.com/nodegit/private"; +``` + +### Clone path + +The second argument to the `clone` method is a path. + +Ideally your application will clone a repository into the same folder path +regardless of how or where you execute it from. Paths are relative to the +current working directory in NodeGit, so you will need to normalize it first. + +This is very simple in Node: + +``` javascript +var localPath = require("path").join(__dirname, "tmp"); +``` + +Now this `tmp` directory will be created along side your script, no matter how +or where you execute it from. + +### Clone options + +The third argument to the `clone` method is an optional simple object. + +``` javascript +var cloneOptions = {}; +``` + +#### GitHub certificate issue in OS X + +Unfortunately in OS X there is a problem where libgit2 is unable to look up +GitHub certificates correctly. In order to bypass this problem, we're going +to passthrough the certificate check. + +*Note: this is not a problem with Windows or Linux* + +``` javascript +cloneOptions.fetchOpts = { + callbacks: { + certificateCheck: function() { return 1; } + } +}; +``` + +#### GitHub credentials for Two Factor Auth + +In order to authorize the clone operation, we'll need to respond to a low-level +callback that expects credentials to be passed. + +This function will be attached below the above `certificateCheck`, and will +respond back with the OAuth token. + +The `fetchOpts` object now looks like this: + +``` javascript +cloneOptions.fetchOpts = { + callbacks: { + certificateCheck: function() { return 1; }, + credentials: function() { + return NodeGit.Cred.userpassPlaintextNew(GITHUB_TOKEN, "x-oauth-basic"); + } + } +}; +``` + +### Invoking the clone method + +You can easily invoke our top-level Clone as a function passing along the three +aforementioned arguments. 
+ +``` javascript +var cloneRepository = NodeGit.Clone(cloneURL, localPath, cloneOptions); +``` + +Notice how we store the return value from `Git.Clone`. This is a +[Promise](https://www.promisejs.org/) to represent the asynchronous operation. +It offers finer control flow by allowing us to capture errors and fallback if +there is a clone failure. + +### Handling clone failure + +A naive way to handle a clone failure is to try opening the same path. Clones +will most commonly fail when the directory already exists. We can define +a function to attempt opening in this case. + +``` javascript +var errorAndAttemptOpen = function() { + return NodeGit.Repository.open(local); +}; +``` + +This will be called as part of the Promise resolution in the final step. + +### The Promise chain + +Lastly in our clone operation, we'll assemble a Promise chain to handle errors +and work with the `Git.Repository` instance result. + +``` javascript +cloneRepository.catch(errorAndAttemptOpen) + .then(function(repository) { + // Access any repository methods here. + console.log("Is the repository bare? %s", Boolean(repository.isBare())); + }); +``` diff --git a/guides/cloning/gh-two-factor/index.js b/guides/cloning/gh-two-factor/index.js new file mode 100644 index 000000000..d723e52cc --- /dev/null +++ b/guides/cloning/gh-two-factor/index.js @@ -0,0 +1,47 @@ +// Require in NodeGit, since we want to use the local copy, we"re using a +// relative path. In your project, you will use: +// +// var NodeGit = require("nodegit"); +var NodeGit = require("../../../"); + +// To clone with two factor auth enabled, you have to use a GitHub OAuth token +// over HTTPS. +var GITHUB_TOKEN = ""; + +// Using the `clone` method from the `Git.Clone` module, bring down the NodeGit +// test repository from GitHub. +var cloneURL = "https://github.com/nodegit/private"; + +// Ensure that the `tmp` directory is local to this file and not the CWD. 
+var localPath = require("path").join(__dirname, "tmp"); + +// Simple object to store clone options. +var cloneOptions = {}; + +// This is a required callback for OS X machines. There is a known issue +// with libgit2 being able to verify certificates from GitHub. +cloneOptions.fetchOpts = { + callbacks: { + certificateCheck: function() { return 1; }, + credentials: function() { + return NodeGit.Cred.userpassPlaintextNew(GITHUB_TOKEN, "x-oauth-basic"); + } + } +}; + +// Invoke the clone operation and store the returned Promise. +var cloneRepository = NodeGit.Clone(cloneURL, localPath, cloneOptions); + +// If the repository already exists, the clone above will fail. You can simply +// open the repository in this case to continue execution. +var errorAndAttemptOpen = function() { + return NodeGit.Repository.open(localPath); +}; + +// Once the repository has been cloned or opened, you can work with a returned +// `Git.Repository` instance. +cloneRepository.catch(errorAndAttemptOpen) + .then(function(repository) { + // Access any repository methods here. + console.log("Is the repository bare? %s", Boolean(repository.isBare())); + }); diff --git a/guides/cloning/index.js b/guides/cloning/index.js new file mode 100644 index 000000000..f6b7c7a37 --- /dev/null +++ b/guides/cloning/index.js @@ -0,0 +1,40 @@ +// Require in NodeGit, since we want to use the local copy, we're using a +// relative path. In your project, you will use: +// +// var NodeGit = require("nodegit"); +var NodeGit = require("../../../"); + +// Using the `clone` method from the `Git.Clone` module, bring down the NodeGit +// test repository from GitHub. +var cloneURL = "https://github.com/nodegit/test"; + +// Ensure that the `tmp` directory is local to this file and not the CWD. +var localPath = require("path").join(__dirname, "tmp"); + +// Simple object to store clone options. +var cloneOptions = {}; + +// This is a required callback for OS X machines. 
There is a known issue +// with libgit2 being able to verify certificates from GitHub. +cloneOptions.fetchOpts = { + callbacks: { + certificateCheck: function() { return 1; } + } +}; + +// Invoke the clone operation and store the returned Promise. +var cloneRepository = NodeGit.Clone(cloneURL, localPath, cloneOptions); + +// If the repository already exists, the clone above will fail. You can simply +// open the repository in this case to continue execution. +var errorAndAttemptOpen = function() { + return NodeGit.Repository.open(localPath); +}; + +// Once the repository has been cloned or opened, you can work with a returned +// `Git.Repository` instance. +cloneRepository.catch(errorAndAttemptOpen) + .then(function(repository) { + // Access any repository methods here. + console.log("Is the repository bare? %s", Boolean(repository.isBare())); + }); diff --git a/guides/cloning/ssh-with-agent/README.md b/guides/cloning/ssh-with-agent/README.md new file mode 100644 index 000000000..ae14cfe39 --- /dev/null +++ b/guides/cloning/ssh-with-agent/README.md @@ -0,0 +1,157 @@ +--- +layout: full +menu_item: guides +title: SSH w/ Agent Guide +description: How to clone with SSH using an agent +--- + +**In order to run examples, you will need to [Install NodeGit](../../install) +first.** + +[Return to all guides](../../) + +* * * + +SSH w/ Agent +------------ + +This guide explains how to clone a repository, and in the case of failure, +attempt to open the existing path. + +[View example source](index.js) + +### Requiring NodeGit + +In the guides directory, we like to keep our NodeGit relative to the project +root. + +``` javascript +var NodeGit = require("../../../"); +``` + +However, in your project you will most likely be using the following command: + +``` javascript +var NodeGit = require("nodegit"); +``` + +### Clone URL + +The first argument to the `clone` method is a URL. + +In this example we're going to clone one of our test repositories from GitHub. 
+You could easily substitute this with any valid Git repository that is +accessible by SSH. + +``` javascript +var cloneURL = "git@github.com:nodegit/test"; +``` + +### Clone path + +The second argument to the `clone` method is a path. + +Ideally your application will clone a repository into the same folder path +regardless of how or where you execute it from. Paths are relative to the +current working directory in NodeGit, so you will need to normalize it first. + +This is very simple in Node: + +``` javascript +var localPath = require("path").join(__dirname, "tmp"); +``` + +Now this `tmp` directory will be created along side your script, no matter how +or where you execute it from. + +### Clone options + +The third argument to the `clone` method is an optional simple object. + +``` javascript +var cloneOptions = {}; +``` + +#### GitHub certificate issue in OS X + +Unfortunately in OS X there is a problem where libgit2 is unable to look up +GitHub certificates correctly. In order to bypass this problem, we're going +to passthrough the certificate check. + +*Note: this is not a problem with Windows or Linux* + +``` javascript +cloneOptions.fetchOpts = { + callbacks: { + certificateCheck: function() { return 1; } + } +}; +``` + +#### SSH credentials via agent + +In order to authorize the clone operation, we'll need to respond to a low-level +callback that expects credentials to be passed. + +This function will be attached below the above `certificateCheck`, and will +respond back with the credentials from the agent. You'll notice we handle +the second argument passed to credentials, `userName`. + +The `fetchOpts` object now looks like this: + +``` javascript +cloneOptions.fetchOpts = { + callbacks: { + certificateCheck: function() { return 1; }, + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyFromAgent(userName); + } + } +}; +``` + +There are many other methods you can use to authorize the SSH connection without +using an agent. 
For now they are documented in the [unit tests]( +https://github.com/nodegit/nodegit/blob/master/test/tests/clone.js +). + +### Invoking the clone method + +You can easily invoke our top-level Clone as a function passing along the three +aforementioned arguments. + +``` javascript +var cloneRepository = NodeGit.Clone(cloneURL, localPath, cloneOptions); +``` + +Notice how we store the return value from `Git.Clone`. This is a +[Promise](https://www.promisejs.org/) to represent the asynchronous operation. +It offers finer control flow by allowing us to capture errors and fallback if +there is a clone failure. + +### Handling clone failure + +A naive way to handle a clone failure is to try opening the same path. Clones +will most commonly fail when the directory already exists. We can define +a function to attempt opening in this case. + +``` javascript +var errorAndAttemptOpen = function() { + return NodeGit.Repository.open(local); +}; +``` + +This will be called as part of the Promise resolution in the final step. + +### The Promise chain + +Lastly in our clone operation, we'll assemble a Promise chain to handle errors +and work with the `Git.Repository` instance result. + +``` javascript +cloneRepository.catch(errorAndAttemptOpen) + .then(function(repository) { + // Access any repository methods here. + console.log("Is the repository bare? %s", Boolean(repository.isBare())); + }); +``` diff --git a/guides/cloning/ssh-with-agent/index.js b/guides/cloning/ssh-with-agent/index.js new file mode 100644 index 000000000..f3926392c --- /dev/null +++ b/guides/cloning/ssh-with-agent/index.js @@ -0,0 +1,46 @@ +// Require in NodeGit, since we want to use the local copy, we're using a +// relative path. In your project, you will use: +// +// var NodeGit = require("nodegit"); +var NodeGit = require("../../../"); + +// Set the URL that NodeGit will connect to clone. 
+var cloneURL = "git@github.com:nodegit/test"; + +// Ensure that the `tmp` directory is local to this file and not the CWD. +var localPath = require("path").join(__dirname, "tmp"); + +// Simple object to store clone options. +var cloneOptions = {}; + +// This is a required callback for OS X machines. There is a known issue +// with libgit2 being able to verify certificates from GitHub. +cloneOptions.fetchOpts = { + callbacks: { + certificateCheck: function() { return 1; }, + + // Credentials are passed two arguments, url and username. We forward the + // `userName` argument to the `sshKeyFromAgent` function to validate + // authentication. + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyFromAgent(userName); + } + } +}; + +// Invoke the clone operation and store the returned Promise. +var cloneRepository = NodeGit.Clone(cloneURL, localPath, cloneOptions); + +// If the repository already exists, the clone above will fail. You can simply +// open the repository in this case to continue execution. +var errorAndAttemptOpen = function() { + return NodeGit.Repository.open(localPath); +}; + +// Once the repository has been cloned or opened, you can work with a returned +// `Git.Repository` instance. +cloneRepository.catch(errorAndAttemptOpen) + .then(function(repository) { + // Access any repository methods here. + console.log("Is the repository bare? 
%s", Boolean(repository.isBare())); + }); diff --git a/guides/install/README.md b/guides/install/README.md new file mode 100644 index 000000000..1cb727a1d --- /dev/null +++ b/guides/install/README.md @@ -0,0 +1,24 @@ +--- +layout: full +menu_item: guides +title: Install Basics +description: How to install NodeGit +--- + +[Return to all guides](../) + +* * * + +From NPM +------------------------------- + +To install from the NPM repository you can issue the following command: + +``` bash +npm install nodegit +``` + +From GitHub +------------------------------------- + +Check out our [build from source](./from-source) guides. diff --git a/guides/install/electron/README.md b/guides/install/electron/README.md new file mode 100644 index 000000000..99e67084a --- /dev/null +++ b/guides/install/electron/README.md @@ -0,0 +1,25 @@ +--- +layout: full +menu_item: guides +title: Electron +description: How to install NodeGit with Electron +--- + +[Return to all guides](../../) + +* * * + +Install for Electron +---------------------- + +Please see the official electron docs [here](https://github.com/electron/electron/blob/master/docs/tutorial/using-native-node-modules.md) + +For a slightly simpler version of the first method, use an `.npmrc` file. For example if you have an Electron app that's targeting version 1.2.8 your .npmrc file would look something like: +``` +runtime = electron +target = 1.2.8 +target_arch = x64 +disturl = https://atom.io/download/atom-shell +``` + +*NOTE: We try to provide prebuilt binaries for electron, but we don't always have them available for every version. 
If prebuilts are not available and you have trouble with local compilation, visit our [building guides](../from-source) for help* diff --git a/guides/install/from-source/README.md b/guides/install/from-source/README.md new file mode 100644 index 000000000..c85e2e3fa --- /dev/null +++ b/guides/install/from-source/README.md @@ -0,0 +1,87 @@ +--- +layout: full +menu_item: guides +title: From source +description: How to build NodeGit from source +--- + +[Return to all guides](../../) + +* * * + +Building from source +-------------------- + +If you wish to help contribute to NodeGit it is useful to build locally. + +``` bash +# Fetch this project. +git clone git://github.com/nodegit/nodegit.git + +# Enter the repository. +cd nodegit + +# Install all dependencies, run the code generation scripts, and build. +npm install +``` + +If you encounter errors, you most likely have not configured the dependencies correctly. + +### Installing dependencies: ### + +#### Mac OS X #### + +- [Install XCode Command Line Tools](http://railsapps.github.io/xcode-command-line-tools.html) + +#### Linux #### + +Using APT in Ubuntu: + +``` bash +sudo apt-get install build-essential +``` + +Using Pacman in Arch Linux: + +``` bash +sudo pacman -S base-devel +``` + +Note that GCC/G++ 4.7+ are required, as the library makes use of some c++11 std calls. + +#### Windows #### + +- [Download and install Python 2](https://www.python.org/download/windows). +- [Download and install VS Community](https://www.visualstudio.com/products/visual-studio-community-vs). + +You may have to add a build flag to the installation process to successfully install. +Try first without, if the build fails, try again with the flag. + +*Allegedly the order in which you install Visual Studio could trigger this error.* + +``` bash +npm install nodegit --msvs_version=2013 +# Or whatever version you've installed. 
+``` + +##### A note on environment variables in Windows ##### +In many of the npm scripts (and examples above), things are run like +`BUILD_ONLY=true npm install`. This sets the `BUILD_ONLY` environment variable +to true for the duration of that command. This doesn't work in windows, however +there is a solution. You can use cmd to call a command inside of cmd (very meta) +with the variable set, and it only lasts for the duration of the inner call to cmd. +So for the above example, you would run `cmd /C "set BUILD_ONLY=true && npm install"`. +See here for more details: +[SuperUser](http://superuser.com/questions/223104/setting-environment-variable-for-just-one-command-in-windows-cmd-exe). + +### Debug build: ### + +In order to track down possible bugs, you will need a debug buid so you +can get a backtrace with [gdb](http://www.gnu.org/software/gdb/) or +[lldb](http://lldb.llvm.org/). + +If you're building for the first time, run `npm run installDebug` (or `BUILD_ONLY=true npm link`) + +Note that you should run `rm -rf build/Release` (or `rd /s /q build/Release` in Windows) to make sure a release build doesn't get loaded instead of the debug build. + +If you're doing a subsequent rebuild of NodeGit in debug, the clean function will cause a lot of extraneous recompilation of things you probably didn't change (like the vendor dependencies). If you need to regenerate the C++ files and recompile you can run `npm run rebuildDebug`, or `npm run recompileDebug` if you've manually updated the C++ files and don't want them to regenerate. 
diff --git a/guides/install/nw.js/README.md b/guides/install/nw.js/README.md new file mode 100644 index 000000000..05b020b42 --- /dev/null +++ b/guides/install/nw.js/README.md @@ -0,0 +1,25 @@ +--- +layout: full +menu_item: guides +title: NW.js +description: How to install NodeGit with NW.js +--- + +[Return to all guides](../../) + +* * * + +Install for nwjs +---------------- + +Please see the official nw.js docs [here](http://docs.nwjs.io/en/latest/For%20Users/Advanced/Use%20Native%20Node%20Modules/#node-pre-gyp) + +For a slightly simpler version of the third method, use an `.npmrc` file. For example if you have an NW.js app that's targeting version 0.13.0, your `.npmrc` file would look something like: +``` +runtime = node-webkit +target = 0.13.0 +target_arch = x64 +disturl = http://node-webkit.s3.amazonaws.com +``` + +*NOTE: NW.js support is not thoroughly tested. Additionally, there are no pre-built binaries for NW.js, you must compile NodeGit. Visit our [building guides](../from-source) for help* diff --git a/guides/repositories/README.md b/guides/repositories/README.md new file mode 100644 index 000000000..a436a24c9 --- /dev/null +++ b/guides/repositories/README.md @@ -0,0 +1,127 @@ +--- +layout: full +menu_item: guides +title: Opening a Repository +description: How to open and free a repository +--- + +**In order to run examples, you will need to [Install NodeGit](../install) +first.** + +[Return to all guides](../) + +* * * + +Opening a Repository +-------------------- + +This guide explains how to open a repository, and how to work with errors in a +promise chain + +[View example source](index.js) + +### Requiring NodeGit + +In the guides directory, we like to keep our NodeGit relative to the project +root. 
+ +``` javascript +var NodeGit = require("../../../"); +``` + +However, in your project you will most likely be using the following command: + +``` javascript +var NodeGit = require("nodegit"); +``` + +### Path to Repo + +The only argument to the `open` method is a path to the repo on disk. Here we +are calculating that from our current directory using the `path` object from +node. + +``` javascript +var pathToRepo = require("path").resolve("../my-git-projects/my-project"); +``` + +You can also point it directly to a `.git` folder to open as well + +``` javascript +var pathToRepo = require("path").resolve("../my-git-projects/my-project/.git"); +``` + +This is not necessary though as the function will check the passed directory +for the `.git` subdirectory + +### Open a Repo + +Now that we have our path to the repo we wish to open we can do so by calling +the `open` method on the `NodeGit.Repository` module + +``` javascript +NodeGit.Repository.open(pathToRepo).then(function (repo) { + // Inside of this function we have an open repo +}); +``` + +*NOTE: We use promises to perform operations in NodeGit. This allows the node event +loop to keep cycling through while under the hood our wrapped libgit2 code is +performing the actions we requested and we're not waiting for it. + +This allows our apps to remain responsive and performant. However if you're +not used to promises then this can take some getting used to. If you need +an introduction you can head over to https://www.promisejs.org/ for some +tutorials.* + +### Handling errors + +Promises will swallow errors if there isn't code to explicitly handle them. +You can do this through any of the following 3 ways. + +#### Providing a second function to the `.then` method + +You can pass a second function parameter to the `.then` method that will have +the reason why a promise failed in it's first argument. 
+ +``` javascript +NodeGit.Repository.open(pathToRepo).then(function (successfulResult) { + // This is the first function of the then which contains the successfully + // calculated result of the promise +}, function (reasonForFailure) { + // This is the second function of the then which contains the reason the + // promise failed +}); +``` + +#### Including a `.catch` in a chain + +You can also append a `.catch` to the end of a promise chain which will +receive any promise failure that isn't previously caught + +``` javascript +NodeGit.Repository.open(pathToRepo).then(function (successfulResult) { + // This is the first function of the then which contains the successfully + // calculated result of the promise +}) +.catch(function (reasonForFailure) { + // failure is handled here +}); +``` + +#### Finishing a chain with `done` + +If you append a `.done` at the end of your chain, you will have any error that +wasn't previously handled by the above 2 methods thrown. + +``` javascript +NodeGit.Repository.open(pathToRepo).then(function (successfulResult) { + // This is the first function of the then which contains the successfully + // calculated result of the promise +}) +.done(function () { + // If we have a .done then the error will be thrown if there was an error that + // wasn't caught by either providing a 2nd function to the `.then` or a + // `.catch` function +}); +``` diff --git a/guides/repositories/index.js b/guides/repositories/index.js new file mode 100644 index 000000000..4c018f8f7 --- /dev/null +++ b/guides/repositories/index.js @@ -0,0 +1,39 @@ +// Require in NodeGit, since we want to use the local copy, we're using a +// relative path. 
In your project, you will use: +// +// var NodeGit = require("nodegit"); +var NodeGit = require("../../../"); + +// Using the `open` method from the `NodeGit.Repository` module, we can open +// a repository using NodeGit +var pathToRepo = require("path").resolve("../my-git-projects/my-project"); + +// In NodeGit we use Promises to make callbacks easier to deal with. +// +// For more information visit https://www.promisejs.org/ +NodeGit.Repository.open(pathToRepo).then(function (repo) { + // In this function we have a repo object that we can perform git operations + // on. + // NOTE: Many NodeGit objects will appear as empty objects if inspected in + // the console. This is a known issue. You can track it's progress at + // https://github.com/nodegit/nodegit/issues/307 +}) +// Promises will swallow errors and not report them unless you have supplied +// a second function to the `.then` or end the chain with either a `.catch` or +// a `.done` +.then(function (successfulResult) { + // This is the first function of the then which contains the successfully + // calculated result of the promise +}, function (reasonForFailure) { + // This is the second function of the then which contains the reason the + // promise failed +}) +.catch(function (reasonForFailure) { + // You can also provide a catch function which will contain the reason why + // any above promises that weren't handled have failed +}) +.done(function() { + // If we have a .done then the error will be thrown if there was an error that + // wasn't caught by either providing a 2nd function to the `.then` or a + // `.catch` function +}); diff --git a/guides/repositories/initializing/README.md b/guides/repositories/initializing/README.md new file mode 100644 index 000000000..90e5090d4 --- /dev/null +++ b/guides/repositories/initializing/README.md @@ -0,0 +1,63 @@ +--- +layout: full +menu_item: guides +title: Initializing +description: How to initialize a repository +--- + +**In order to run examples, you will 
need to [Install NodeGit](../../install) +first.** + +[Return to all guides](../../) + +* * * + +Initializing a Repository +------------------------- + +This guide explains how to create a new repository. + +[View example source](index.js) + +### Requiring NodeGit + +In the guides directory, we like to keep our NodeGit relative to the project +root. + +``` javascript +var NodeGit = require("../../../"); +``` + +However, in your project you will most likely be using the following command: + +``` javascript +var NodeGit = require("nodegit"); +``` + +### Arguments to initialize a repo + +There are 2 arguments to the `init` method, a path to initialize the repo in +and whether or not to make a `.git` subfolder in that directory or use the +passed in directory as the `.git` folder itself. + + +``` javascript +var pathToRepo = require("path").resolve("../my-git-projects/my-project"); +var isBare = 0; // lets create a .git subfolder +``` + + +### Initialize the Repo + +Now that we have our arguments we can call the `init` method on the +`NodeGit.Repository` module to create the repo. + +``` javascript +NodeGit.Repository.init(pathToRepo, isBare).then(function (repo) { + // In this function we have a repo object that we can perform git operations + // on. + + // Note that with a new repository many functions will fail until there is + // an initial commit. +}); +``` diff --git a/guides/repositories/initializing/index.js b/guides/repositories/initializing/index.js new file mode 100644 index 000000000..67470ca19 --- /dev/null +++ b/guides/repositories/initializing/index.js @@ -0,0 +1,29 @@ +// Require in NodeGit, since we want to use the local copy, we're using a +// relative path. 
In your project, you will use: +// +// var NodeGit = require("nodegit"); +var NodeGit = require("../../../"); + +// Using the `init` method from the `NodeGit.Repository` module, we can create +// a blank repository using NodeGit +var pathToRepo = require("path").resolve("../my-git-projects/my-project"); + +// Setting this to 0 will have the effect creating a `.git` folder inside of +// passed path. If this is 1 then we will use the passed path AS our `.git` +// folder. +var isBare = 0; + +// In NodeGit we use Promises to make callbacks easier to deal with. +// +// For more information visit https://www.promisejs.org/ +NodeGit.Repository.init(pathToRepo, isBare).then(function (repo) { + // In this function we have a repo object that we can perform git operations + // on. + + // Note that with a new repository many functions will fail until there is + // an initial commit. +}) +.catch(function (reasonForFailure) { + // If the repo cannot be created for any reason we can handle that case here. + // NodeGit won't init a repo over a pre-existing repo. +}); diff --git a/include/blob.h b/include/blob.h deleted file mode 100755 index 561338354..000000000 --- a/include/blob.h +++ /dev/null @@ -1,41 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITBLOB_H -#define GITBLOB_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitBlob : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_blob *GetValue(); - - static Handle New(void *raw); - - private: - GitBlob(git_blob *raw); - ~GitBlob(); - - static Handle New(const Arguments& args); - - - static Handle Oid(const Arguments& args); - static Handle Content(const Arguments& args); - static Handle Size(const Arguments& args); - static Handle IsBinary(const Arguments& args); - git_blob *raw; -}; - -#endif diff --git a/include/branch.h b/include/branch.h deleted file mode 100644 index bfbf9bcdb..000000000 --- a/include/branch.h +++ /dev/null @@ -1,48 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef BRANCH_H -#define BRANCH_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class Branch : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_branch *GetValue(); - - static Handle New(void *raw); - - private: - Branch(git_branch *raw); - ~Branch(); - - static Handle New(const Arguments& args); - - - static Handle Create(const Arguments& args); - static Handle Delete(const Arguments& args); - static Handle Foreach(const Arguments& args); - static Handle Move(const Arguments& args); - static Handle Lookup(const Arguments& args); - static Handle Name(const Arguments& args); - static Handle Upstream(const Arguments& args); - static Handle SetUpstream(const Arguments& args); - static Handle UpstreamName(const Arguments& args); - static Handle IsHead(const Arguments& args); - static Handle RemoteName(const Arguments& args); - git_branch *raw; -}; - -#endif diff --git a/include/clone_options.h b/include/clone_options.h deleted file mode 100644 
index 14da5daad..000000000 --- a/include/clone_options.h +++ /dev/null @@ -1,37 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITCLONEOPTIONS_H -#define GITCLONEOPTIONS_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitCloneOptions : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_clone_options *GetValue(); - - static Handle New(void *raw); - - private: - GitCloneOptions(git_clone_options *raw); - ~GitCloneOptions(); - - static Handle New(const Arguments& args); - - - git_clone_options *raw; -}; - -#endif diff --git a/include/commit.h b/include/commit.h deleted file mode 100755 index 6f69f5889..000000000 --- a/include/commit.h +++ /dev/null @@ -1,48 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITCOMMIT_H -#define GITCOMMIT_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitCommit : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_commit *GetValue(); - - static Handle New(void *raw); - - private: - GitCommit(git_commit *raw); - ~GitCommit(); - - static Handle New(const Arguments& args); - - - static Handle Oid(const Arguments& args); - static Handle MessageEncoding(const Arguments& args); - static Handle Message(const Arguments& args); - static Handle Time(const Arguments& args); - static Handle Offset(const Arguments& args); - static Handle Committer(const Arguments& args); - static Handle Author(const Arguments& args); - static Handle TreeId(const Arguments& args); - static Handle ParentCount(const Arguments& args); - static Handle ParentId(const Arguments& args); - static Handle NthGenAncestor(const Arguments& args); - git_commit *raw; -}; - -#endif diff 
--git a/include/delta.h b/include/delta.h deleted file mode 100644 index e06f6283c..000000000 --- a/include/delta.h +++ /dev/null @@ -1,42 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITDELTA_H -#define GITDELTA_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitDelta : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_diff_delta *GetValue(); - - static Handle New(void *raw); - - private: - GitDelta(git_diff_delta *raw); - ~GitDelta(); - - static Handle New(const Arguments& args); - - static Handle OldFile(const Arguments& args); - static Handle NewFile(const Arguments& args); - static Handle Status(const Arguments& args); - static Handle Similarity(const Arguments& args); - static Handle Flags(const Arguments& args); - - git_diff_delta *raw; -}; - -#endif diff --git a/include/diff.h b/include/diff.h deleted file mode 100644 index fa88badfa..000000000 --- a/include/diff.h +++ /dev/null @@ -1,111 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITDIFF_H -#define GITDIFF_H - -#include -#include -#include - -#include "nan.h" - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitDiff : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_diff_list *GetValue(); - - private: - GitDiff(git_diff_list *raw); - ~GitDiff(); - - static NAN_METHOD(New); - - static NAN_METHOD(TreeToTree); - - static void TreeToTreeWork(uv_work_t* req); - static void TreeToTreeAfterWork(uv_work_t* req); - - struct TreeToTreeBaton { - uv_work_t request; - const git_error* error; - git_diff_list * diff; - Persistent repoReference; - git_repository * repo; - Persistent old_treeReference; - git_tree * old_tree; - Persistent new_treeReference; - git_tree * new_tree; - Persistent optsReference; - const git_diff_options * opts; - Persistent callback; - }; - static NAN_METHOD(TreeToIndex); - static void TreeToIndexWork(uv_work_t* req); - static void TreeToIndexAfterWork(uv_work_t* req); - - struct TreeToIndexBaton { - uv_work_t request; - const git_error* error; - git_diff_list * diff; - Persistent repoReference; - git_repository * repo; - Persistent old_treeReference; - git_tree * old_tree; - Persistent indexReference; - git_index * index; - Persistent optsReference; - const git_diff_options * opts; - Persistent callback; - }; - static NAN_METHOD(IndexToWorkdir); - static void IndexToWorkdirWork(uv_work_t* req); - static void IndexToWorkdirAfterWork(uv_work_t* req); - - struct IndexToWorkdirBaton { - uv_work_t request; - const git_error* error; - git_diff_list * diff; - Persistent repoReference; - git_repository * repo; - Persistent indexReference; - git_index * index; - Persistent optsReference; - const git_diff_options * opts; - Persistent callback; - }; - static NAN_METHOD(TreeToWorkdir); - static void TreeToWorkdirWork(uv_work_t* req); - static void TreeToWorkdirAfterWork(uv_work_t* req); - - struct TreeToWorkdirBaton { - 
uv_work_t request; - const git_error* error; - git_diff_list * diff; - Persistent repoReference; - git_repository * repo; - Persistent old_treeReference; - git_tree * old_tree; - Persistent optsReference; - const git_diff_options * opts; - Persistent callback; - }; - static NAN_METHOD(Merge); - static NAN_METHOD(FindSimilar); - static NAN_METHOD(StatusChar); - static NAN_METHOD(NumDeltas); - static NAN_METHOD(NumDeltasOfType); - static NAN_METHOD(GetPatch); - git_diff_list *raw; -}; - -#endif diff --git a/include/diff_file.h b/include/diff_file.h deleted file mode 100644 index f7f8210d0..000000000 --- a/include/diff_file.h +++ /dev/null @@ -1,42 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITDIFFFILE_H -#define GITDIFFFILE_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitDiffFile : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_diff_file *GetValue(); - - static Handle New(void *raw); - - private: - GitDiffFile(git_diff_file *raw); - ~GitDiffFile(); - - static Handle New(const Arguments& args); - - static Handle Oid(const Arguments& args); - static Handle Path(const Arguments& args); - static Handle Size(const Arguments& args); - static Handle Flags(const Arguments& args); - static Handle Mode(const Arguments& args); - - git_diff_file *raw; -}; - -#endif diff --git a/include/diff_find_options.h b/include/diff_find_options.h deleted file mode 100644 index 3500d1b23..000000000 --- a/include/diff_find_options.h +++ /dev/null @@ -1,37 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITDIFFFINDOPTIONS_H -#define GITDIFFFINDOPTIONS_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitDiffFindOptions : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_diff_find_options *GetValue(); - - static Handle New(void *raw); - - private: - GitDiffFindOptions(git_diff_find_options *raw); - ~GitDiffFindOptions(); - - static Handle New(const Arguments& args); - - - git_diff_find_options *raw; -}; - -#endif diff --git a/include/diff_list.h b/include/diff_list.h deleted file mode 100644 index 185a00e56..000000000 --- a/include/diff_list.h +++ /dev/null @@ -1,42 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITDIFFLIST_H -#define GITDIFFLIST_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitDiffList : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_diff_list *GetValue(); - - static Handle New(void *raw); - - private: - GitDiffList(git_diff_list *raw); - ~GitDiffList(); - - static Handle New(const Arguments& args); - - - static Handle Merge(const Arguments& args); - static Handle FindSimilar(const Arguments& args); - static Handle Size(const Arguments& args); - static Handle NumDeltasOfType(const Arguments& args); - static Handle Patch(const Arguments& args); - git_diff_list *raw; -}; - -#endif diff --git a/include/diff_options.h b/include/diff_options.h deleted file mode 100644 index 6dd93a0b8..000000000 --- a/include/diff_options.h +++ /dev/null @@ -1,37 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITDIFFOPTIONS_H -#define GITDIFFOPTIONS_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitDiffOptions : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_diff_options *GetValue(); - - static Handle New(void *raw); - - private: - GitDiffOptions(git_diff_options *raw); - ~GitDiffOptions(); - - static Handle New(const Arguments& args); - - - git_diff_options *raw; -}; - -#endif diff --git a/include/diff_range.h b/include/diff_range.h deleted file mode 100644 index bb44337b4..000000000 --- a/include/diff_range.h +++ /dev/null @@ -1,41 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITDIFFRANGE_H -#define GITDIFFRANGE_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitDiffRange : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_diff_range *GetValue(); - - static Handle New(void *raw); - - private: - GitDiffRange(git_diff_range *raw); - ~GitDiffRange(); - - static Handle New(const Arguments& args); - - static Handle OldStart(const Arguments& args); - static Handle OldLines(const Arguments& args); - static Handle NewStart(const Arguments& args); - static Handle NewLines(const Arguments& args); - - git_diff_range *raw; -}; - -#endif diff --git a/include/index.h b/include/index.h deleted file mode 100755 index e634d7ea0..000000000 --- a/include/index.h +++ /dev/null @@ -1,142 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITINDEX_H -#define GITINDEX_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitIndex : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_index *GetValue(); - - static Handle New(void *raw); - - private: - GitIndex(git_index *raw); - ~GitIndex(); - - static Handle New(const Arguments& args); - - - static Handle Open(const Arguments& args); - static void OpenWork(uv_work_t* req); - static void OpenAfterWork(uv_work_t* req); - - struct OpenBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_index * out; - Persistent index_pathReference; - const char * index_path; - Persistent callback; - }; - static Handle Read(const Arguments& args); - static void ReadWork(uv_work_t* req); - static void ReadAfterWork(uv_work_t* req); - - struct ReadBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent indexReference; - git_index * index; - Persistent callback; - }; - static Handle Write(const Arguments& args); - static void WriteWork(uv_work_t* req); - static void WriteAfterWork(uv_work_t* req); - - struct WriteBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent indexReference; - git_index * index; - Persistent callback; - }; - static Handle ReadTree(const Arguments& args); - static void ReadTreeWork(uv_work_t* req); - static void ReadTreeAfterWork(uv_work_t* req); - - struct ReadTreeBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent indexReference; - git_index * index; - Persistent treeReference; - const git_tree * tree; - Persistent callback; - }; - static Handle WriteTree(const Arguments& args); - static void WriteTreeWork(uv_work_t* req); - static void WriteTreeAfterWork(uv_work_t* req); - - struct WriteTreeBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_oid * out; - 
Persistent indexReference; - git_index * index; - Persistent callback; - }; - static Handle Size(const Arguments& args); - static Handle Clear(const Arguments& args); - static Handle Entry(const Arguments& args); - static Handle Remove(const Arguments& args); - static Handle RemoveDirectory(const Arguments& args); - static Handle AddBypath(const Arguments& args); - static void AddBypathWork(uv_work_t* req); - static void AddBypathAfterWork(uv_work_t* req); - - struct AddBypathBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent indexReference; - git_index * index; - Persistent pathReference; - const char * path; - Persistent callback; - }; - static Handle RemoveBypath(const Arguments& args); - static Handle Find(const Arguments& args); - static Handle ConflictRemove(const Arguments& args); - static Handle ConflictCleanup(const Arguments& args); - static Handle HasConflicts(const Arguments& args); - static Handle IndexToWorkdir(const Arguments& args); - static void IndexToWorkdirWork(uv_work_t* req); - static void IndexToWorkdirAfterWork(uv_work_t* req); - - struct IndexToWorkdirBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_diff_list * diff; - Persistent repoReference; - git_repository * repo; - Persistent indexReference; - git_index * index; - Persistent optsReference; - const git_diff_options * opts; - Persistent callback; - }; - git_index *raw; -}; - -#endif diff --git a/include/index_entry.h b/include/index_entry.h deleted file mode 100644 index 7bf5fc6a8..000000000 --- a/include/index_entry.h +++ /dev/null @@ -1,49 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITINDEXENTRY_H -#define GITINDEXENTRY_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitIndexEntry : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_index_entry *GetValue(); - - static Handle New(void *raw); - - private: - GitIndexEntry(git_index_entry *raw); - ~GitIndexEntry(); - - static Handle New(const Arguments& args); - - static Handle Ctime(const Arguments& args); - static Handle Mtime(const Arguments& args); - static Handle Dev(const Arguments& args); - static Handle Ino(const Arguments& args); - static Handle Mode(const Arguments& args); - static Handle Uid(const Arguments& args); - static Handle gid(const Arguments& args); - static Handle FileSize(const Arguments& args); - static Handle Oid(const Arguments& args); - static Handle Flags(const Arguments& args); - static Handle FlagsExtended(const Arguments& args); - static Handle Path(const Arguments& args); - - git_index_entry *raw; -}; - -#endif diff --git a/include/index_time.h b/include/index_time.h deleted file mode 100644 index 765ce6cc7..000000000 --- a/include/index_time.h +++ /dev/null @@ -1,39 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITINDEXTIME_H -#define GITINDEXTIME_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitIndexTime : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_index_time *GetValue(); - - static Handle New(void *raw); - - private: - GitIndexTime(git_index_time *raw); - ~GitIndexTime(); - - static Handle New(const Arguments& args); - - static Handle Seconds(const Arguments& args); - static Handle Nanoseconds(const Arguments& args); - - git_index_time *raw; -}; - -#endif diff --git a/include/object.h b/include/object.h deleted file mode 100644 index 6ab9d3537..000000000 --- a/include/object.h +++ /dev/null @@ -1,54 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITOBJECT_H -#define GITOBJECT_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitObject : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_object *GetValue(); - - static Handle New(void *raw); - - private: - GitObject(git_object *raw); - ~GitObject(); - - static Handle New(const Arguments& args); - - - static Handle Oid(const Arguments& args); - static Handle Type(const Arguments& args); - static Handle Peel(const Arguments& args); - static void PeelWork(uv_work_t* req); - static void PeelAfterWork(uv_work_t* req); - - struct PeelBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_object * peeled; - Persistent objectReference; - const git_object * object; - Persistent target_typeReference; - git_otype target_type; - Persistent callback; - }; - git_object *raw; -}; - -#endif diff --git a/include/odb.h b/include/odb.h deleted file mode 100644 index 6ed9913d9..000000000 --- a/include/odb.h +++ /dev/null @@ -1,80 +0,0 @@ -/** - * This code is 
auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITODB_H -#define GITODB_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitOdb : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_odb *GetValue(); - - static Handle New(void *raw); - - private: - GitOdb(git_odb *raw); - ~GitOdb(); - - static Handle New(const Arguments& args); - - - static Handle Create(const Arguments& args); - static Handle Open(const Arguments& args); - static Handle AddDiskAlternate(const Arguments& args); - static Handle Read(const Arguments& args); - static void ReadWork(uv_work_t* req); - static void ReadAfterWork(uv_work_t* req); - - struct ReadBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_odb_object * out; - Persistent dbReference; - git_odb * db; - Persistent idReference; - const git_oid * id; - Persistent callback; - }; - static Handle ReadPrefix(const Arguments& args); - static Handle ReadHeader(const Arguments& args); - static Handle Exists(const Arguments& args); - static Handle Refresh(const Arguments& args); - static Handle Write(const Arguments& args); - static void WriteWork(uv_work_t* req); - static void WriteAfterWork(uv_work_t* req); - - struct WriteBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_oid * out; - Persistent odbReference; - git_odb * odb; - Persistent dataReference; - const void * data; - Persistent lenReference; - size_t len; - Persistent typeReference; - git_otype type; - Persistent callback; - }; - static Handle Hash(const Arguments& args); - static Handle Hashfile(const Arguments& args); - git_odb *raw; -}; - -#endif diff --git a/include/odb_object.h b/include/odb_object.h deleted file mode 100644 index 7938dfa49..000000000 --- a/include/odb_object.h +++ /dev/null @@ -1,41 +0,0 @@ -/** - * This code is auto-generated; unless you know 
what you're doing, do not modify! - **/ - -#ifndef GITODBOBJECT_H -#define GITODBOBJECT_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitOdbObject : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_odb_object *GetValue(); - - static Handle New(void *raw); - - private: - GitOdbObject(git_odb_object *raw); - ~GitOdbObject(); - - static Handle New(const Arguments& args); - - - static Handle Data(const Arguments& args); - static Handle Size(const Arguments& args); - static Handle Type(const Arguments& args); - static Handle Oid(const Arguments& args); - git_odb_object *raw; -}; - -#endif diff --git a/include/oid.h b/include/oid.h deleted file mode 100755 index 2ce3a9e5c..000000000 --- a/include/oid.h +++ /dev/null @@ -1,39 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITOID_H -#define GITOID_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitOid : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_oid *GetValue(); - - static Handle New(void *raw); - - private: - GitOid(git_oid *raw); - ~GitOid(); - - static Handle New(const Arguments& args); - - - static Handle FromString(const Arguments& args); - static Handle Sha(const Arguments& args); - git_oid *raw; -}; - -#endif diff --git a/include/patch.h b/include/patch.h deleted file mode 100644 index 7a4d408ff..000000000 --- a/include/patch.h +++ /dev/null @@ -1,44 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITPATCH_H -#define GITPATCH_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitPatch : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_diff_patch *GetValue(); - - static Handle New(void *raw); - - private: - GitPatch(git_diff_patch *raw); - ~GitPatch(); - - static Handle New(const Arguments& args); - - - static Handle Delta(const Arguments& args); - static Handle Size(const Arguments& args); - static Handle Stats(const Arguments& args); - static Handle Hunk(const Arguments& args); - static Handle Lines(const Arguments& args); - static Handle Line(const Arguments& args); - static Handle ToString(const Arguments& args); - git_diff_patch *raw; -}; - -#endif diff --git a/include/refdb.h b/include/refdb.h deleted file mode 100644 index 51355028f..000000000 --- a/include/refdb.h +++ /dev/null @@ -1,37 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITREFDB_H -#define GITREFDB_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitRefDb : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_refdb *GetValue(); - - static Handle New(void *raw); - - private: - GitRefDb(git_refdb *raw); - ~GitRefDb(); - - static Handle New(const Arguments& args); - - - git_refdb *raw; -}; - -#endif diff --git a/include/reference.h b/include/reference.h deleted file mode 100644 index ae7d18ad4..000000000 --- a/include/reference.h +++ /dev/null @@ -1,104 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITREFERENCE_H -#define GITREFERENCE_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitReference : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_reference *GetValue(); - - static Handle New(void *raw); - - private: - GitReference(git_reference *raw); - ~GitReference(); - - static Handle New(const Arguments& args); - - - static Handle OidForName(const Arguments& args); - static void OidForNameWork(uv_work_t* req); - static void OidForNameAfterWork(uv_work_t* req); - - struct OidForNameBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_oid * out; - Persistent repoReference; - git_repository * repo; - Persistent nameReference; - const char * name; - Persistent callback; - }; - static Handle Target(const Arguments& args); - static Handle SymbolicTarget(const Arguments& args); - static Handle Type(const Arguments& args); - static Handle Name(const Arguments& args); - static Handle Resolve(const Arguments& args); - static void ResolveWork(uv_work_t* req); - static void ResolveAfterWork(uv_work_t* req); - - struct ResolveBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_reference * out; - Persistent refReference; - const git_reference * ref; - Persistent callback; - }; - static Handle SetSymbolicTarget(const Arguments& args); - static Handle setTarget(const Arguments& args); - static Handle Rename(const Arguments& args); - static void RenameWork(uv_work_t* req); - static void RenameAfterWork(uv_work_t* req); - - struct RenameBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_reference * out; - Persistent refReference; - git_reference * ref; - Persistent new_nameReference; - const char * new_name; - Persistent forceReference; - int force; - Persistent callback; - }; - static Handle Delete(const Arguments& args); - static void 
DeleteWork(uv_work_t* req); - static void DeleteAfterWork(uv_work_t* req); - - struct DeleteBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent refReference; - git_reference * ref; - Persistent callback; - }; - static Handle IsBranch(const Arguments& args); - static Handle IsRemote(const Arguments& args); - static Handle Peel(const Arguments& args); - static Handle IsValidName(const Arguments& args); - git_reference *raw; -}; - -#endif diff --git a/include/remote.h b/include/remote.h deleted file mode 100644 index 81fd1ac68..000000000 --- a/include/remote.h +++ /dev/null @@ -1,93 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITREMOTE_H -#define GITREMOTE_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitRemote : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_remote *GetValue(); - - static Handle New(void *raw); - - private: - GitRemote(git_remote *raw); - ~GitRemote(); - - static Handle New(const Arguments& args); - - - static Handle Name(const Arguments& args); - static Handle Url(const Arguments& args); - static Handle PushUrl(const Arguments& args); - static Handle SetUrl(const Arguments& args); - static Handle SetPushUrl(const Arguments& args); - static Handle Connect(const Arguments& args); - static void ConnectWork(uv_work_t* req); - static void ConnectAfterWork(uv_work_t* req); - - struct ConnectBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent remoteReference; - git_remote * remote; - Persistent directionReference; - git_direction direction; - Persistent callback; - }; - static Handle Download(const Arguments& args); - static void DownloadWork(uv_work_t* req); - static void DownloadAfterWork(uv_work_t* req); - - struct DownloadBaton { - uv_work_t request; - int error_code; - const 
git_error* error; - Persistent remoteReference; - git_remote * remote; - Persistent progress_cbReference; - git_transfer_progress_callback progress_cb; - Persistent payloadReference; - void * payload; - Persistent callback; - }; - static Handle Connected(const Arguments& args); - static Handle Stop(const Arguments& args); - static Handle Disconnect(const Arguments& args); - static void DisconnectWork(uv_work_t* req); - static void DisconnectAfterWork(uv_work_t* req); - - struct DisconnectBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent remoteReference; - git_remote * remote; - Persistent callback; - }; - static Handle UpdateTips(const Arguments& args); - static Handle ValidUrl(const Arguments& args); - static Handle SupportedUrl(const Arguments& args); - static Handle CheckCert(const Arguments& args); - static Handle UpdateFetchhead(const Arguments& args); - static Handle SetUpdateFetchhead(const Arguments& args); - static Handle IsValidName(const Arguments& args); - git_remote *raw; -}; - -#endif diff --git a/include/repo.h b/include/repo.h deleted file mode 100755 index 8f10f3427..000000000 --- a/include/repo.h +++ /dev/null @@ -1,370 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITREPO_H -#define GITREPO_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitRepo : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_repository *GetValue(); - - static Handle New(void *raw); - - private: - GitRepo(git_repository *raw); - ~GitRepo(); - - static Handle New(const Arguments& args); - - - static Handle Open(const Arguments& args); - static void OpenWork(uv_work_t* req); - static void OpenAfterWork(uv_work_t* req); - - struct OpenBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_repository * out; - Persistent pathReference; - const char * path; - Persistent callback; - }; - static Handle Init(const Arguments& args); - static void InitWork(uv_work_t* req); - static void InitAfterWork(uv_work_t* req); - - struct InitBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_repository * out; - Persistent pathReference; - const char * path; - Persistent is_bareReference; - unsigned is_bare; - Persistent callback; - }; - static Handle Path(const Arguments& args); - static Handle Workdir(const Arguments& args); - static Handle Odb(const Arguments& args); - static Handle openIndex(const Arguments& args); - static void openIndexWork(uv_work_t* req); - static void openIndexAfterWork(uv_work_t* req); - - struct openIndexBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_index * out; - Persistent repoReference; - git_repository * repo; - Persistent callback; - }; - static Handle GetBlob(const Arguments& args); - static void GetBlobWork(uv_work_t* req); - static void GetBlobAfterWork(uv_work_t* req); - - struct GetBlobBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_blob * blob; - Persistent repoReference; - git_repository * repo; - Persistent idReference; - const git_oid * id; - Persistent callback; - }; - 
static Handle GetCommit(const Arguments& args); - static void GetCommitWork(uv_work_t* req); - static void GetCommitAfterWork(uv_work_t* req); - - struct GetCommitBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_commit * commit; - Persistent repoReference; - git_repository * repo; - Persistent idReference; - const git_oid * id; - Persistent callback; - }; - static Handle CreateCommit(const Arguments& args); - static void CreateCommitWork(uv_work_t* req); - static void CreateCommitAfterWork(uv_work_t* req); - - struct CreateCommitBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_oid * id; - Persistent repoReference; - git_repository * repo; - Persistent update_refReference; - const char * update_ref; - Persistent authorReference; - const git_signature * author; - Persistent committerReference; - const git_signature * committer; - Persistent message_encodingReference; - const char * message_encoding; - Persistent messageReference; - const char * message; - Persistent treeReference; - const git_tree * tree; - Persistent parent_countReference; - int parent_count; - Persistent parentsReference; - const git_commit ** parents; - Persistent callback; - }; - static Handle GetObject(const Arguments& args); - static void GetObjectWork(uv_work_t* req); - static void GetObjectAfterWork(uv_work_t* req); - - struct GetObjectBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_object * object; - Persistent repoReference; - git_repository * repo; - Persistent idReference; - const git_oid * id; - Persistent typeReference; - git_otype type; - Persistent callback; - }; - static Handle GetReference(const Arguments& args); - static void GetReferenceWork(uv_work_t* req); - static void GetReferenceAfterWork(uv_work_t* req); - - struct GetReferenceBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_reference * out; - Persistent repoReference; - git_repository * repo; - Persistent 
nameReference; - const char * name; - Persistent callback; - }; - static Handle CreateSymbolicReference(const Arguments& args); - static Handle CreateReference(const Arguments& args); - static Handle AddRemote(const Arguments& args); - static void AddRemoteWork(uv_work_t* req); - static void AddRemoteAfterWork(uv_work_t* req); - - struct AddRemoteBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_remote * out; - Persistent repoReference; - git_repository * repo; - Persistent nameReference; - const char * name; - Persistent urlReference; - const char * url; - Persistent callback; - }; - static Handle CreateRevWalk(const Arguments& args); - static Handle GetSubmodule(const Arguments& args); - static Handle AddSubmodule(const Arguments& args); - static Handle GetTag(const Arguments& args); - static void GetTagWork(uv_work_t* req); - static void GetTagAfterWork(uv_work_t* req); - - struct GetTagBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_tag * out; - Persistent repoReference; - git_repository * repo; - Persistent idReference; - const git_oid * id; - Persistent callback; - }; - static Handle CreateTag(const Arguments& args); - static void CreateTagWork(uv_work_t* req); - static void CreateTagAfterWork(uv_work_t* req); - - struct CreateTagBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_oid * oid; - Persistent repoReference; - git_repository * repo; - Persistent tag_nameReference; - const char * tag_name; - Persistent targetReference; - const git_object * target; - Persistent taggerReference; - const git_signature * tagger; - Persistent messageReference; - const char * message; - Persistent forceReference; - int force; - Persistent callback; - }; - static Handle CreateLightweightTag(const Arguments& args); - static void CreateLightweightTagWork(uv_work_t* req); - static void CreateLightweightTagAfterWork(uv_work_t* req); - - struct CreateLightweightTagBaton { - uv_work_t request; 
- int error_code; - const git_error* error; - git_oid * oid; - Persistent repoReference; - git_repository * repo; - Persistent tag_nameReference; - const char * tag_name; - Persistent targetReference; - const git_object * target; - Persistent forceReference; - int force; - Persistent callback; - }; - static Handle GetTree(const Arguments& args); - static void GetTreeWork(uv_work_t* req); - static void GetTreeAfterWork(uv_work_t* req); - - struct GetTreeBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_tree * out; - Persistent repoReference; - git_repository * repo; - Persistent idReference; - const git_oid * id; - Persistent callback; - }; - static Handle ReloadSubmodules(const Arguments& args); - static void ReloadSubmodulesWork(uv_work_t* req); - static void ReloadSubmodulesAfterWork(uv_work_t* req); - - struct ReloadSubmodulesBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent repoReference; - git_repository * repo; - Persistent callback; - }; - static Handle Delete(const Arguments& args); - static void DeleteWork(uv_work_t* req); - static void DeleteAfterWork(uv_work_t* req); - - struct DeleteBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent repoReference; - git_repository * repo; - Persistent tag_nameReference; - const char * tag_name; - Persistent callback; - }; - static Handle GetReferences(const Arguments& args); - static void GetReferencesWork(uv_work_t* req); - static void GetReferencesAfterWork(uv_work_t* req); - - struct GetReferencesBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_strarray * array; - Persistent repoReference; - git_repository * repo; - Persistent list_flagsReference; - unsigned int list_flags; - Persistent callback; - }; - static Handle CreateBlobFromBuffer(const Arguments& args); - static void CreateBlobFromBufferWork(uv_work_t* req); - static void CreateBlobFromBufferAfterWork(uv_work_t* req); - - struct 
CreateBlobFromBufferBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_oid * oid; - Persistent repoReference; - git_repository * repo; - Persistent bufferReference; - const void * buffer; - Persistent lenReference; - size_t len; - Persistent callback; - }; - static Handle CreateBlobFromFile(const Arguments& args); - static void CreateBlobFromFileWork(uv_work_t* req); - static void CreateBlobFromFileAfterWork(uv_work_t* req); - - struct CreateBlobFromFileBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_oid * id; - Persistent repoReference; - git_repository * repo; - Persistent pathReference; - const char * path; - Persistent callback; - }; - static Handle GetRemotes(const Arguments& args); - static void GetRemotesWork(uv_work_t* req); - static void GetRemotesAfterWork(uv_work_t* req); - - struct GetRemotesBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_strarray * out; - Persistent repoReference; - git_repository * repo; - Persistent callback; - }; - static Handle Clone(const Arguments& args); - static void CloneWork(uv_work_t* req); - static void CloneAfterWork(uv_work_t* req); - - struct CloneBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_repository * out; - Persistent urlReference; - const char * url; - Persistent local_pathReference; - const char * local_path; - Persistent optionsReference; - const git_clone_options * options; - Persistent callback; - }; - static Handle GetRemote(const Arguments& args); - git_repository *raw; -}; - -#endif diff --git a/include/revwalk.h b/include/revwalk.h deleted file mode 100755 index c2096cc61..000000000 --- a/include/revwalk.h +++ /dev/null @@ -1,160 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITREVWALK_H -#define GITREVWALK_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitRevWalk : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_revwalk *GetValue(); - - static Handle New(void *raw); - - private: - GitRevWalk(git_revwalk *raw); - ~GitRevWalk(); - - static Handle New(const Arguments& args); - - - static Handle Reset(const Arguments& args); - static Handle Push(const Arguments& args); - static void PushWork(uv_work_t* req); - static void PushAfterWork(uv_work_t* req); - - struct PushBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent walkReference; - git_revwalk * walk; - Persistent idReference; - const git_oid * id; - Persistent callback; - }; - static Handle PushGlob(const Arguments& args); - static void PushGlobWork(uv_work_t* req); - static void PushGlobAfterWork(uv_work_t* req); - - struct PushGlobBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent walkReference; - git_revwalk * walk; - Persistent globReference; - const char * glob; - Persistent callback; - }; - static Handle PushHead(const Arguments& args); - static void PushHeadWork(uv_work_t* req); - static void PushHeadAfterWork(uv_work_t* req); - - struct PushHeadBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent walkReference; - git_revwalk * walk; - Persistent callback; - }; - static Handle Hide(const Arguments& args); - static void HideWork(uv_work_t* req); - static void HideAfterWork(uv_work_t* req); - - struct HideBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent walkReference; - git_revwalk * walk; - Persistent commit_idReference; - const git_oid * commit_id; - Persistent callback; - }; - static Handle HideGlob(const Arguments& args); - static void HideGlobWork(uv_work_t* req); - static void 
HideGlobAfterWork(uv_work_t* req); - - struct HideGlobBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent walkReference; - git_revwalk * walk; - Persistent globReference; - const char * glob; - Persistent callback; - }; - static Handle HideHead(const Arguments& args); - static void HideHeadWork(uv_work_t* req); - static void HideHeadAfterWork(uv_work_t* req); - - struct HideHeadBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent walkReference; - git_revwalk * walk; - Persistent callback; - }; - static Handle PushRef(const Arguments& args); - static void PushRefWork(uv_work_t* req); - static void PushRefAfterWork(uv_work_t* req); - - struct PushRefBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent walkReference; - git_revwalk * walk; - Persistent refnameReference; - const char * refname; - Persistent callback; - }; - static Handle HideRef(const Arguments& args); - static void HideRefWork(uv_work_t* req); - static void HideRefAfterWork(uv_work_t* req); - - struct HideRefBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent walkReference; - git_revwalk * walk; - Persistent refnameReference; - const char * refname; - Persistent callback; - }; - static Handle Next(const Arguments& args); - static void NextWork(uv_work_t* req); - static void NextAfterWork(uv_work_t* req); - - struct NextBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_oid * out; - Persistent walkReference; - git_revwalk * walk; - Persistent callback; - }; - static Handle Sorting(const Arguments& args); - git_revwalk *raw; -}; - -#endif diff --git a/include/signature.h b/include/signature.h deleted file mode 100755 index 14d542c82..000000000 --- a/include/signature.h +++ /dev/null @@ -1,42 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITSIGNATURE_H -#define GITSIGNATURE_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitSignature : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_signature *GetValue(); - - static Handle New(void *raw); - - private: - GitSignature(git_signature *raw); - ~GitSignature(); - - static Handle New(const Arguments& args); - - static Handle Name(const Arguments& args); - static Handle Email(const Arguments& args); - static Handle Time(const Arguments& args); - - static Handle Create(const Arguments& args); - static Handle Now(const Arguments& args); - git_signature *raw; -}; - -#endif diff --git a/include/submodule.h b/include/submodule.h deleted file mode 100644 index f80d8ee3d..000000000 --- a/include/submodule.h +++ /dev/null @@ -1,146 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITSUBMODULE_H -#define GITSUBMODULE_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitSubmodule : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_submodule *GetValue(); - - static Handle New(void *raw); - - private: - GitSubmodule(git_submodule *raw); - ~GitSubmodule(); - - static Handle New(const Arguments& args); - - - static Handle AddFinalize(const Arguments& args); - static void AddFinalizeWork(uv_work_t* req); - static void AddFinalizeAfterWork(uv_work_t* req); - - struct AddFinalizeBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent submoduleReference; - git_submodule * submodule; - Persistent callback; - }; - static Handle AddToIndex(const Arguments& args); - static void AddToIndexWork(uv_work_t* req); - static void AddToIndexAfterWork(uv_work_t* req); - - struct AddToIndexBaton { - uv_work_t 
request; - int error_code; - const git_error* error; - Persistent submoduleReference; - git_submodule * submodule; - Persistent write_indexReference; - int write_index; - Persistent callback; - }; - static Handle Save(const Arguments& args); - static void SaveWork(uv_work_t* req); - static void SaveAfterWork(uv_work_t* req); - - struct SaveBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent submoduleReference; - git_submodule * submodule; - Persistent callback; - }; - static Handle Name(const Arguments& args); - static Handle Path(const Arguments& args); - static Handle Url(const Arguments& args); - static Handle SetUrl(const Arguments& args); - static Handle IndexId(const Arguments& args); - static Handle HeadId(const Arguments& args); - static Handle Init(const Arguments& args); - static void InitWork(uv_work_t* req); - static void InitAfterWork(uv_work_t* req); - - struct InitBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent submoduleReference; - git_submodule * submodule; - Persistent overwriteReference; - int overwrite; - Persistent callback; - }; - static Handle Sync(const Arguments& args); - static void SyncWork(uv_work_t* req); - static void SyncAfterWork(uv_work_t* req); - - struct SyncBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent submoduleReference; - git_submodule * submodule; - Persistent callback; - }; - static Handle Open(const Arguments& args); - static void OpenWork(uv_work_t* req); - static void OpenAfterWork(uv_work_t* req); - - struct OpenBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_repository * repo; - Persistent submoduleReference; - git_submodule * submodule; - Persistent callback; - }; - static Handle Reload(const Arguments& args); - static void ReloadWork(uv_work_t* req); - static void ReloadAfterWork(uv_work_t* req); - - struct ReloadBaton { - uv_work_t request; - int error_code; - const git_error* 
error; - Persistent submoduleReference; - git_submodule * submodule; - Persistent callback; - }; - static Handle Status(const Arguments& args); - static void StatusWork(uv_work_t* req); - static void StatusAfterWork(uv_work_t* req); - - struct StatusBaton { - uv_work_t request; - int error_code; - const git_error* error; - Persistent statusReference; - unsigned int * status; - Persistent submoduleReference; - git_submodule * submodule; - Persistent callback; - }; - git_submodule *raw; -}; - -#endif diff --git a/include/tag.h b/include/tag.h deleted file mode 100755 index f89ded8b1..000000000 --- a/include/tag.h +++ /dev/null @@ -1,57 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITTAG_H -#define GITTAG_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitTag : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_tag *GetValue(); - - static Handle New(void *raw); - - private: - GitTag(git_tag *raw); - ~GitTag(); - - static Handle New(const Arguments& args); - - - static Handle Oid(const Arguments& args); - static Handle GetTarget(const Arguments& args); - static void GetTargetWork(uv_work_t* req); - static void GetTargetAfterWork(uv_work_t* req); - - struct GetTargetBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_object * target_out; - Persistent tagReference; - const git_tag * tag; - Persistent callback; - }; - static Handle TargetId(const Arguments& args); - static Handle TargetType(const Arguments& args); - static Handle Name(const Arguments& args); - static Handle Tagger(const Arguments& args); - static Handle Message(const Arguments& args); - static Handle Peel(const Arguments& args); - git_tag *raw; -}; - -#endif diff --git a/include/threads.h b/include/threads.h deleted file mode 100755 index 7a67b0295..000000000 --- 
a/include/threads.h +++ /dev/null @@ -1,33 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITTHREADS_H -#define GITTHREADS_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitThreads : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - - private: - - static Handle New(const Arguments& args); - - - static Handle Init(const Arguments& args); - static Handle Shutdown(const Arguments& args); -}; - -#endif diff --git a/include/time.h b/include/time.h deleted file mode 100644 index b742a88b0..000000000 --- a/include/time.h +++ /dev/null @@ -1,39 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITTIME_H -#define GITTIME_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitTime : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_time *GetValue(); - - static Handle New(void *raw); - - private: - GitTime(git_time *raw); - ~GitTime(); - - static Handle New(const Arguments& args); - - static Handle Time(const Arguments& args); - static Handle Offset(const Arguments& args); - - git_time *raw; -}; - -#endif diff --git a/include/tree.h b/include/tree.h deleted file mode 100755 index 5462a82b1..000000000 --- a/include/tree.h +++ /dev/null @@ -1,113 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef GITTREE_H -#define GITTREE_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitTree : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_tree *GetValue(); - - static Handle New(void *raw); - - private: - GitTree(git_tree *raw); - ~GitTree(); - - static Handle New(const Arguments& args); - - - static Handle Oid(const Arguments& args); - static Handle Size(const Arguments& args); - static Handle EntryByName(const Arguments& args); - static Handle EntryByIndex(const Arguments& args); - static Handle EntryByOid(const Arguments& args); - static Handle GetEntry(const Arguments& args); - static void GetEntryWork(uv_work_t* req); - static void GetEntryAfterWork(uv_work_t* req); - - struct GetEntryBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_tree_entry * out; - Persistent rootReference; - git_tree * root; - Persistent pathReference; - const char * path; - Persistent callback; - }; - static Handle Builder(const Arguments& args); - static Handle DiffTree(const Arguments& args); - static void DiffTreeWork(uv_work_t* req); - static void DiffTreeAfterWork(uv_work_t* req); - - struct DiffTreeBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_diff_list * diff; - Persistent repoReference; - git_repository * repo; - Persistent old_treeReference; - git_tree * old_tree; - Persistent new_treeReference; - git_tree * new_tree; - Persistent optsReference; - const git_diff_options * opts; - Persistent callback; - }; - static Handle DiffIndex(const Arguments& args); - static void DiffIndexWork(uv_work_t* req); - static void DiffIndexAfterWork(uv_work_t* req); - - struct DiffIndexBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_diff_list * diff; - Persistent repoReference; - git_repository * repo; - Persistent old_treeReference; - git_tree * old_tree; - 
Persistent indexReference; - git_index * index; - Persistent optsReference; - const git_diff_options * opts; - Persistent callback; - }; - static Handle DiffWorkDir(const Arguments& args); - static void DiffWorkDirWork(uv_work_t* req); - static void DiffWorkDirAfterWork(uv_work_t* req); - - struct DiffWorkDirBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_diff_list * diff; - Persistent repoReference; - git_repository * repo; - Persistent old_treeReference; - git_tree * old_tree; - Persistent optsReference; - const git_diff_options * opts; - Persistent callback; - }; - git_tree *raw; -}; - -#endif diff --git a/include/tree_builder.h b/include/tree_builder.h deleted file mode 100644 index 93d08a69f..000000000 --- a/include/tree_builder.h +++ /dev/null @@ -1,58 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITTREEBUILDER_H -#define GITTREEBUILDER_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitTreeBuilder : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_treebuilder *GetValue(); - - static Handle New(void *raw); - - private: - GitTreeBuilder(git_treebuilder *raw); - ~GitTreeBuilder(); - - static Handle New(const Arguments& args); - - - static Handle Create(const Arguments& args); - static Handle Clear(const Arguments& args); - static Handle Size(const Arguments& args); - static Handle Get(const Arguments& args); - static Handle Insert(const Arguments& args); - static Handle GitTreebuilderRemove(const Arguments& args); - static Handle Write(const Arguments& args); - static void WriteWork(uv_work_t* req); - static void WriteAfterWork(uv_work_t* req); - - struct WriteBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_oid * id; - Persistent repoReference; - git_repository * repo; - Persistent bldReference; - 
git_treebuilder * bld; - Persistent callback; - }; - git_treebuilder *raw; -}; - -#endif diff --git a/include/tree_entry.h b/include/tree_entry.h deleted file mode 100755 index 7c78ec1e9..000000000 --- a/include/tree_entry.h +++ /dev/null @@ -1,56 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ - -#ifndef GITTREEENTRY_H -#define GITTREEENTRY_H - -#include -#include -#include - -#include "git2.h" - -using namespace node; -using namespace v8; - -class GitTreeEntry : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - git_tree_entry *GetValue(); - - static Handle New(void *raw); - - private: - GitTreeEntry(git_tree_entry *raw); - ~GitTreeEntry(); - - static Handle New(const Arguments& args); - - - static Handle Name(const Arguments& args); - static Handle Oid(const Arguments& args); - static Handle Type(const Arguments& args); - static Handle filemode(const Arguments& args); - static Handle GetObject(const Arguments& args); - static void GetObjectWork(uv_work_t* req); - static void GetObjectAfterWork(uv_work_t* req); - - struct GetObjectBaton { - uv_work_t request; - int error_code; - const git_error* error; - git_object * object_out; - Persistent repoReference; - git_repository * repo; - Persistent entryReference; - const git_tree_entry * entry; - Persistent callback; - }; - git_tree_entry *raw; -}; - -#endif diff --git a/include/wrapper.h b/include/wrapper.h deleted file mode 100644 index 5cb98063e..000000000 --- a/include/wrapper.h +++ /dev/null @@ -1,34 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ - -#ifndef WRAPPER_H -#define WRAPPER_H - -#include -#include - -#include "nan.h" - -using namespace node; -using namespace v8; - -class Wrapper : public ObjectWrap { - public: - - static Persistent constructor_template; - static void Initialize (Handle target); - - void *GetValue(); - static Handle New(void *raw); - - private: - Wrapper(void *raw); - - static NAN_METHOD(New); - static NAN_METHOD(ToBuffer); - - void *raw; -}; - -#endif diff --git a/index.js b/index.js deleted file mode 100755 index 53a54654a..000000000 --- a/index.js +++ /dev/null @@ -1,43 +0,0 @@ -// Used to detect for Cygwin -var os = require('os'); - -// Required for Windows/Cygwin support -var root = [__dirname, '/vendor/libgit2/build/shared'].join(''), - path = process.env.PATH; - -if (~os.type().indexOf('CYGWIN') && !~path.indexOf(root)) { - process.env.PATH = root + ':' + path; -} - -// Assign raw api to module -var rawApi; -try { - rawApi = require('./build/Release/nodegit'); -} catch (e) { - rawApi = require('./build/Debug/nodegit'); -} - -// Set the exports prototype to the raw API. -exports.__proto__ = rawApi; - -// Import extensions -require('./lib/commit.js'); -require('./lib/blob.js'); -require('./lib/object.js'); -require('./lib/signature.js'); -require('./lib/odb.js'); -require('./lib/oid.js'); -require('./lib/index.js'); -require('./lib/repo.js'); -require('./lib/reference.js'); -require('./lib/revwalk.js'); -require('./lib/tree.js'); -require('./lib/diff_list.js'); -require('./lib/tree_entry.js'); -require('./lib/tree_builder.js'); - -// Set version -exports.version = require('./package').version; - -// Initialize threads -exports.Threads.init(); diff --git a/install.js b/install.js deleted file mode 100644 index 5c987f5fb..000000000 --- a/install.js +++ /dev/null @@ -1,193 +0,0 @@ -// Core Node.js modules. 
-var os = require('os'); -var fs = require('fs'); -var path = require('path'); -var zlib = require('zlib'); -var exec = require('child_process').exec; - -// Third-party modules. -var Q = require('q'); -var request = require('request'); -var tar = require('tar'); -var which = require('which'); -var rimraf = require('rimraf'); - -// This will take in an object and find any matching keys in the environment -// to use as overrides. -// -// ENV variables: -// -// PKG: Location of `package.json` sans `.json`. -// LIBGIT2: Location of libgit2 source. -// BUILD: Location of nodegit build directory. -function envOverride(obj) { - // Look through all keys. - return Object.keys(obj).reduce(function(obj, key) { - var normalize = key.toUpperCase(); - - // Check for process environment existence. - if (normalize in process.env) { - obj[key] = process.env[normalize]; - } - - return obj; - }, obj); -} - -// Convert to the correct system path. -function systemPath(parts) { - return parts.join(path.sep); -} - -// Will be used near the end to configure `node-gyp`. -var python, cmake; - -// Common reusable paths that can be overwritten by environment variables. -var paths = envOverride({ - pkg: __dirname + '/package', - libgit2: __dirname + '/vendor/libgit2/', - build: __dirname + '/vendor/libgit2/build/', -}); - -// Load the package.json. -var pkg = require(paths.pkg); - -// Ensure all dependencies are available. -var dependencies = Q.allSettled([ - // This will prioritize `python2` over `python`, because we always want to - // work with Python 2.* if it's available. - Q.nfcall(which, 'python2'), - Q.nfcall(which, 'python'), - - // Check for any version of CMake. - Q.nfcall(which, 'cmake'), -]) - -// Determine if all the dependency requirements are met. -.then(function(results) { - console.info('[nodegit] Determining dependencies.'); - - // Assign to reusable variables. - python = results[0].value || results[1].value; - cmake = results[2].value; - - // Missing Python. 
- if (!python) { - throw new Error('Python is required to build libgit2.'); - } - - // Missing CMake. - if (!cmake) { - throw new Error('CMake is required to build libgit2.'); - } - - // Now lets check the Python version to ensure it's < 3. - return Q.nfcall(exec, python + ' --version').then(function(version) { - if (version[1].indexOf('Python 3') === 0) { - throw new Error('Incorrect version of Python, gyp requires < 3.'); - } - }); -}) - -// Successfully found all dependencies. First step is to clean the vendor -// directory. -.then(function() { - console.info('[nodegit] Removing vendor/libgit2.'); - - return Q.ninvoke(rimraf, null, paths.libgit2); -}) - -// Now fetch the libgit2 source from GitHub. -.then(function() { - console.info('[nodegit] Fetching vendor/libgit2.'); - - var url = 'https://github.com/libgit2/libgit2/tarball/' + pkg.libgit2; - - var extract = tar.Extract({ - path: paths.libgit2, - strip: true - }); - - // First extract from Zlib and then extract from Tar. - var expand = request.get(url).pipe(zlib.createUnzip()).pipe(extract); - - return Q.ninvoke(expand, 'on', 'end'); -}) - -// Fetch completed successfully. -.then(function() { - console.info('[nodegit] Creating vendor/libgit2/build.'); - - return Q.ninvoke(fs, 'mkdir', paths.build); -}) - -// Configure libgit2 using cmake. -.then(function() { - console.info('[nodegit] Configuring libgit2.'); - - // Minimum flags necessary to configure in sane environments. - var flags = ['-DTHREADSAFE=ON', '-DBUILD_CLAR=OFF']; - - // Windows flags. - if (process.platform.indexOf('win') > -1) { - flags.push.apply(flags, [ - '-DSTDCALL=OFF', - '-DBUILD_SHARED_LIBS=OFF', - '-DCMAKE_C_FLAGS=-fPIC', - '-DCMAKE_BUILD_TYPE=RelWithDebInfo' - ]); - - // If the architecture is 64bit, have to change the generator. - if (os.arch() === 'x64') { - flags.push('-G "Visual Studio 12 Win64"'); - } - } - - return Q.nfcall(exec, 'cmake .. ' + flags.join(' '), { - cwd: paths.build - }); -}) - -// Build libgit2 using cmake. 
-.then(function() { - console.info('[nodegit] Building libgit2.'); - - return Q.nfcall(exec, 'cmake --build .', { - cwd: paths.build - }); -}) - -// Configure the native module using node-gyp. -.then(function() { - console.info('[nodegit] Configuring native node module.'); - - return Q.nfcall(exec, systemPath([ - '.', 'node_modules', '.bin', 'node-gyp configure --python ' + python - ]), { - cwd: '.' - }); -}) - -// Build the native module using node-gyp. -.then(function() { - console.info('[nodegit] Building native node module.'); - - return Q.nfcall(exec, systemPath([ - '.', 'node_modules', '.bin', 'node-gyp build' - ]), { - cwd: '.', - maxBuffer: Number.MAX_VALUE - }); -}) - -// Display a success message. -.then(function() { - console.info('[nodegit] Completed installation successfully.'); -}) - -// Display a warning message about failing to build native node module. -.fail(function(message) { - console.info('[nodegit] Failed to build nodegit.'); - console.info(message.message); - console.info(message.stack); -}); diff --git a/lib/blame.js b/lib/blame.js new file mode 100644 index 000000000..77f3ad9a3 --- /dev/null +++ b/lib/blame.js @@ -0,0 +1,18 @@ +var NodeGit = require("../"); +var normalizeOptions = NodeGit.Utils.normalizeOptions; +var Blame = NodeGit.Blame; + +var _file = Blame.file; + +/** + * Retrieve the blame of a file + * + * @param {Repository} repo that contains the file + * @param {String} path to the file to get the blame of + * @param {BlameOptions} [options] Options for the blame + */ +Blame.file = function(repo, path, options) { + options = normalizeOptions(options, NodeGit.BlameOptions); + + return _file.call(this, repo, path, options); +}; diff --git a/lib/blob.js b/lib/blob.js index 341eef637..073623fdd 100644 --- a/lib/blob.js +++ b/lib/blob.js @@ -1,27 +1,42 @@ -var git = require('../'), - TreeEntry = git.TreeEntry, - Blob = git.Blob; +var NodeGit = require("../"); +var Blob = NodeGit.Blob; +var LookupWrapper = NodeGit.Utils.lookupWrapper; 
+var TreeEntry = NodeGit.TreeEntry; -var oldContent = Blob.prototype.content; +/** +* Retrieves the blob pointed to by the oid +* @async +* @param {Repository} repo The repo that the blob lives in +* @param {String|Oid|Blob} id The blob to lookup +* @return {Blob} +*/ +Blob.lookup = LookupWrapper(Blob); /** - * Retrieve the content of the blob. - * @return {Buffer} content + * Retrieve the content of the Blob. + * + * @return {Buffer} Contents as a buffer. */ Blob.prototype.content = function() { - return oldContent.call(this).toBuffer(this.size()); + return this.rawcontent().toBuffer(this.rawsize()); }; /** - * Retrieve the blob's content as String. + * Retrieve the Blob's type. + * + * @return {Number} The filemode of the blob. */ -Blob.prototype.toString = function() { - return this.content().toString(); +Blob.prototype.filemode = function() { + var FileMode = TreeEntry.FILEMODE; + + return this.isBinary() ? FileMode.EXECUTABLE : FileMode.BLOB; }; /** - * Retrieve the blob's type. + * Retrieve the Blob's content as String. + * + * @return {String} Contents as a string. */ -Blob.prototype.filemode = function() { - return this.isBinary() ? TreeEntry.FileMode.Executable : TreeEntry.FileMode.Blob; +Blob.prototype.toString = function() { + return this.content().toString(); }; diff --git a/lib/checkout.js b/lib/checkout.js new file mode 100644 index 000000000..652f6116a --- /dev/null +++ b/lib/checkout.js @@ -0,0 +1,51 @@ +var NodeGit = require("../"); +var normalizeOptions = NodeGit.Utils.normalizeOptions; + +var Checkout = NodeGit.Checkout; +var _head = Checkout.head; +var _index = Checkout.index; +var _tree = Checkout.tree; + +/** +* Patch head checkout to automatically coerce objects. 
+* +* @async +* @param {Repository} repo The repo to checkout head +* @param {CheckoutOptions} [options] Options for the checkout +* @return {Void} checkout complete +*/ +Checkout.head = function(url, options) { + options = normalizeOptions(options || {}, NodeGit.CheckoutOptions); + + return _head.call(this, url, options); +}; + +/** +* Patch index checkout to automatically coerce objects. +* +* @async +* @param {Repository} repo The repo to checkout an index +* @param {Index} The index to checkout +* @param {CheckoutOptions} [options] Options for the checkout +* @return {Void} checkout complete +*/ +Checkout.index = function(repo, index, options) { + options = normalizeOptions(options || {}, NodeGit.CheckoutOptions); + + return _index.call(this, repo, index, options); +}; + +/** +* Patch tree checkout to automatically coerce objects. +* +* @async +* @param {Repository} repo +* @param {Oid|Tree|Commit|Reference} treeish +* @param {CheckoutOptions} [options] +* @return {Void} checkout complete +*/ +Checkout.tree = function(repo, treeish, options) { + options = normalizeOptions(options || {}, NodeGit.CheckoutOptions); + + return _tree.call(this, repo, treeish, options); +}; diff --git a/lib/cherrypick.js b/lib/cherrypick.js new file mode 100644 index 000000000..cf003c2a5 --- /dev/null +++ b/lib/cherrypick.js @@ -0,0 +1,73 @@ +var NodeGit = require("../"); +var shallowClone = NodeGit.Utils.shallowClone; +var normalizeOptions = NodeGit.Utils.normalizeOptions; + +var Cherrypick = NodeGit.Cherrypick; +var _cherrypick = Cherrypick.cherrypick; +var _commit = Cherrypick.commit; + +/** +* Cherrypick a commit and, changing the index and working directory +* +* @async +* @param {Repository} repo The repo to checkout head +* @param {Commit} commit The commit to cherrypick +* @param {CherrypickOptions} [options] Options for the cherrypick +* @return {int} 0 on success, -1 on failure +*/ +Cherrypick.cherrypick = function(repo, commit, options) { + var mergeOpts; + var 
checkoutOpts; + + if (options) { + options = shallowClone(options); + mergeOpts = options.mergeOpts; + checkoutOpts = options.checkoutOpts; + delete options.mergeOpts; + delete options.checkoutOpts; + } + + options = normalizeOptions(options, NodeGit.CherrypickOptions); + + if (mergeOpts) { + options.mergeOpts = + normalizeOptions(mergeOpts, NodeGit.MergeOptions); + } + + if (checkoutOpts) { + options.checkoutOpts = + normalizeOptions(checkoutOpts, NodeGit.CheckoutOptions); + } + + return _cherrypick.call(this, repo, commit, options); +}; + +/** +* Cherrypicks the given commit against "our" commit, producing an index that +* reflects the result of the cherrypick. The index is not backed by a repo. +* +* @async +* @param {Repository} repo The repo to cherrypick commits +* @param {Commit} cherrypick_commit The commit to cherrypick +* @param {Commit} our_commit The commit to revert against +* @param {int} mainline The parent of the revert commit (1 or +* 2) if it's a merge, 0 otherwise +* @param {MergeOptions} [merge_options] Merge options for the cherrypick +* @return {int} 0 on success, -1 on failure +*/ +Cherrypick.commit = function( + repo, + cherrypick_commit, + our_commit, + mainline, + merge_options) { + merge_options = normalizeOptions(merge_options, NodeGit.MergeOptions); + + return _commit.call( + this, + repo, + cherrypick_commit, + our_commit, + mainline, + merge_options); +}; diff --git a/lib/clone.js b/lib/clone.js new file mode 100644 index 000000000..8dab5c3c3 --- /dev/null +++ b/lib/clone.js @@ -0,0 +1,60 @@ +var NodeGit = require("../"); +var shallowClone = NodeGit.Utils.shallowClone; +var normalizeOptions = NodeGit.Utils.normalizeOptions; + +var Clone = NodeGit.Clone; +var _clone = Clone.clone; + +/** + * Patch repository cloning to automatically coerce objects. 
+ * + * @async + * @param {String} url url of the repository + * @param {String} local_path local path to store repository + * @param {CloneOptions} [options] + * @return {Repository} repo + */ +Clone.clone = function(url, local_path, options) { + var remoteCallbacks = {}; + var fetchOpts = {}; + + if (options) { + options = shallowClone(options); + if (options.fetchOpts) { + fetchOpts = shallowClone(options.fetchOpts); + } + delete options.fetchOpts; + } + + options = normalizeOptions(options, NodeGit.CloneOptions); + + if (fetchOpts.callbacks) { + remoteCallbacks = shallowClone(fetchOpts.callbacks); + delete fetchOpts.callbacks; + } + + fetchOpts = normalizeOptions(fetchOpts, NodeGit.FetchOptions); + + fetchOpts.callbacks = + normalizeOptions(remoteCallbacks, NodeGit.RemoteCallbacks); + + if (options) { + options.fetchOpts = fetchOpts; + } + + // This is required to clean up after the clone to avoid file locking + // issues in Windows and potentially other issues we don't know about. + var freeRepository = function(repository) { + repository.free(); + }; + + // We want to provide a valid repository object, so reopen the repository + // after clone and cleanup. + var openRepository = function() { + return NodeGit.Repository.open(local_path); + }; + + return _clone.call(this, url, local_path, options) + .then(freeRepository) + .then(openRepository); +}; diff --git a/lib/commit.js b/lib/commit.js index 8dc0851f6..73d3f62b4 100644 --- a/lib/commit.js +++ b/lib/commit.js @@ -1,21 +1,52 @@ -var git = require('../'), - Commit = git.Commit, - events = require('events'); +var events = require("events"); +var NodeGit = require("../"); +var Commit = NodeGit.Commit; +var LookupWrapper = NodeGit.Utils.lookupWrapper; +var _amend = Commit.prototype.amend; /** - * Retrieve the SHA. 
- * @return {String} + * Retrieves the commit pointed to by the oid + * @async + * @param {Repository} repo The repo that the commit lives in + * @param {String|Oid|Commit} id The commit to lookup + * @return {Commit} */ -Commit.prototype.sha = function() { - return this.oid().sha(); -}; +Commit.lookup = LookupWrapper(Commit); /** - * Retrieve the commit time as a unix timestamp. - * @return {Number} + * Amend a commit + * @async + * @param {String} update_ref + * @param {Signature} author + * @param {Signature} committer + * @param {String} message_encoding + * @param {String} message + * @param {Tree|Oid} tree + * @param {Oid} callback */ -Commit.prototype.timeMs = function() { - return this.time() * 1000; +Commit.prototype.amend = function ( + updateRef, author, committer, message_encoding, message, tree, callback) { + var repo = this.repo; + var _this = this; + var treePromise; + + if (tree instanceof NodeGit.Oid){ + treePromise = repo.getTree(tree); + } else { + treePromise = Promise.resolve(tree); + } + + return treePromise + .then(function(treeObject){ + return _amend.call(_this, + updateRef, + author, + committer, + message_encoding, + message, + treeObject + ); + }); }; /** @@ -27,129 +58,191 @@ Commit.prototype.date = function() { }; /** - * Get the tree associated with this commit. - * @return {Tree} + * Generate an array of diff trees showing changes between this commit + * and its parent(s). + * + * @async + * @param {Function} callback + * @return {Array} an array of diffs */ -Commit.prototype.getTree = function(callback) { - this.repo.getTree(this.treeId(), callback); +Commit.prototype.getDiff = function(callback) { + return this.getDiffWithOptions(null, callback); +}; + +/** + * Generate an array of diff trees showing changes between this commit + * and its parent(s). 
+ * + * @async + * @param {Object} options + * @param {Function} callback + * @return {Array} an array of diffs + */ +Commit.prototype.getDiffWithOptions = function(options, callback) { + var commit = this; + + return commit.getTree().then(function(thisTree) { + return commit.getParents().then(function(parents) { + var diffs; + if (parents.length) { + diffs = parents.map(function(parent) { + return parent.getTree().then(function(parentTree) { + return thisTree.diffWithOptions(parentTree, options); + }); + }); + } else { + diffs = [thisTree.diffWithOptions(null, options)]; + } + + return Promise.all(diffs); + }); + }).then(function(diffs) { + if (typeof callback === "function") { + callback(null, diffs); + } + + return diffs; + }, callback); }; /** * Retrieve the entry represented by path for this commit. * Path must be relative to repository root. * + * @async * @param {String} path - * @param {Function} callback * @return {TreeEntry} */ Commit.prototype.getEntry = function(path, callback) { - this.getTree(function(error, tree) { - if (error) return callback(error); + return this.getTree().then(function(tree) { + return tree.getEntry(path).then(function(entry) { + if (typeof callback === "function") { + callback(null, entry); + } + + return entry; + }); + }, callback); +}; + +/** + * Retrieve the commit's parents as commit objects. + * + * @async + * @param {number} limit Optional amount of parents to return. + * @param {Function} callback + * @return {Array} array of commits + */ +Commit.prototype.getParents = function(limit, callback) { + var parents = []; + + // Shift arguments. + if (typeof limit === "function") { + callback = limit; + } + + // If no limit was set, default to the maximum parents. + limit = typeof limit === "number" ? 
limit : this.parentcount(); + limit = Math.min(limit, this.parentcount()); - tree.getEntry(path, callback); - }); + for (var i = 0; i < limit; i++) { + var oid = this.parentId(i); + var parent = this.repo.getCommit(oid); + + parents.push(parent); + } + + // Wait for all parents to complete, before returning. + return Promise.all(parents).then(function(parents) { + if (typeof callback === "function") { + callback(null, parents); + } + + return parents; + }, callback); +}; + +/** + * Get the tree associated with this commit. + * + * @async + * @return {Tree} + */ +Commit.prototype.getTree = function(callback) { + return this.repo.getTree(this.treeId(), callback); }; /** * Walk the history from this commit backwards. - * An EventEmitter is returned that will emit a 'commit' event for each - * commit in the history, and one 'end' event when the walk is completed. + * + * An EventEmitter is returned that will emit a "commit" event for each + * commit in the history, and one "end" event when the walk is completed. * Don't forget to call `start()` on the returned event. 
* - * @fires Commit#commit - * @fires Commit#end + * @fires EventEmitter#commit Commit + * @fires EventEmitter#end Array + * @fires EventEmitter#error Error * * @return {EventEmitter} + * @start start() */ Commit.prototype.history = function() { var event = new events.EventEmitter(); - - var oid = this.oid(); + var oid = this.id(); var revwalk = this.repo.createRevWalk(); + revwalk.sorting.apply(revwalk, arguments); var commits = []; + event.start = function() { revwalk.walk(oid, function commitRevWalk(error, commit) { - if (error) return event.emit('error', error); - - if (!commit) { - event.emit('end', commits); - return; + if (error) { + if (error.errno === NodeGit.Error.CODE.ITEROVER) { + event.emit("end", commits); + return; + } else { + return event.emit("error", error); + } } - event.emit('commit', commit); + + event.emit("commit", commit); commits.push(commit); }); }; - return event; -}; - -/** - * Retrieve the commit's parents -- as commit objects. - * - * @param {Function} callback - * @return {[Commit]} array of commits - */ -Commit.prototype.getParents = function(callback) { - var self = this; - function processParents(commit, n, acc, callback) { - if (n < 0) return callback(null, acc); - - self.repo.getCommit(self.parentId(n), function nextParent(error, parent) { - if (error) return callback(error); - processParents(parent, n-1, acc.concat([parent]), callback); - }); - } - processParents(this, this.parentCount() - 1, [], callback); + return event; }; /** * Retrieve the commit's parent shas. * - * @param {Function} callback - * @return {[Oid]} array of oids + * @return {Array} array of oids */ Commit.prototype.parents = function() { var result = []; - for (var i = 0; i < this.parentCount(); i++) { + + for (var i = 0; i < this.parentcount(); i++) { result.push(this.parentId(i)); } + return result; -} +}; /** - * Generate an array of diff trees showing changes between this commit - * and its parent(s). 
- * - * @param {Function} callback - * @return {[DiffList]} an array of difflists + * Retrieve the SHA. + * @return {String} */ -Commit.prototype.getDiff = function(callback) { - var self = this; - self.getParents(function commitParents(error, parents) { - if (error) return callback(error); - - var parentDiffLists = []; - parents.forEach(function commitEachParent(parent) { - parent.getTree(function(error, parentTree) { - if (error) return callback(error); - - self.getTree(function(error, thisTree) { - if (error) return callback(error); - - parentTree.diff(thisTree, function walkDiffList(error, diffList) { - if (error) return callback(error); +Commit.prototype.sha = function() { + return this.id().toString(); +}; - parentDiffLists.push(diffList); - if (parentDiffLists.length === parents.length) { - callback(null, parentDiffLists); - } - }); - }); - }); - }); - }); +/** + * Retrieve the commit time as a unix timestamp. + * @return {Number} + */ +Commit.prototype.timeMs = function() { + return this.time() * 1000; }; /** diff --git a/lib/config.js b/lib/config.js new file mode 100644 index 000000000..1527ede7b --- /dev/null +++ b/lib/config.js @@ -0,0 +1,8 @@ +var NodeGit = require("../"); + +var Config = NodeGit.Config; + +// Backwards compatibility. +Config.prototype.getString = function() { + return this.getStringBuf.apply(this, arguments); +}; diff --git a/lib/convenient_hunk.js b/lib/convenient_hunk.js deleted file mode 100644 index 9f032a6a5..000000000 --- a/lib/convenient_hunk.js +++ /dev/null @@ -1,34 +0,0 @@ -function ConvenientHunk(raw, i) { - this.raw = raw; - this.i = i; -} - -/** - * Diff header string that represents the context of this hunk - * of the diff. 
Something like `@@ -169,14 +167,12 @@ ...` - * @return {String} - */ -ConvenientHunk.prototype.header = function() { - return this.raw.hunk(this.i).header; -}; - -/** - * Number of lines in this hunk - * @return {Number} - */ -ConvenientHunk.prototype.size = function() { - return this.raw.hunk(this.i).lines; -}; - -/** - * The lines in this hunk - * @return {[String]} array of strings - */ -ConvenientHunk.prototype.lines = function() { - var result = []; - for (var i = 0; i < this.size(); i++) - result.push(this.raw.line(this.i, i)); - return result; -}; - -exports.ConvenientHunk = ConvenientHunk; diff --git a/lib/convenient_hunks.js b/lib/convenient_hunks.js new file mode 100644 index 000000000..ef8070d06 --- /dev/null +++ b/lib/convenient_hunks.js @@ -0,0 +1,61 @@ +var NodeGit = require("../"); + +var ConvenientHunk = NodeGit.ConvenientHunk; + +var header = ConvenientHunk.prototype.header; + /** + * Diff header string that represents the context of this hunk + * of the diff. Something like `@@ -169,14 +167,12 @@ ...` + * @return {String} + */ +ConvenientHunk.prototype.header = header; + +var headerLen = ConvenientHunk.prototype.headerLen; +/** + * The length of the header + * @return {Number} + */ +ConvenientHunk.prototype.headerLen = headerLen; + +var lines = ConvenientHunk.prototype.lines; +/** + * The lines in this hunk + * @async + * @return {Array} + */ +ConvenientHunk.prototype.lines = lines; + +var newLines = ConvenientHunk.prototype.newLines; +/** + * The number of new lines in the hunk + * @return {Number} + */ +ConvenientHunk.prototype.newLines = newLines; + +var newStart = ConvenientHunk.prototype.newStart; +/** + * The starting offset of the first new line in the file + * @return {Number} + */ +ConvenientHunk.prototype.newStart = newStart; + +var oldLines = ConvenientHunk.prototype.oldLines; +/** + * The number of old lines in the hunk + * @return {Number} + */ +ConvenientHunk.prototype.oldLines = oldLines; + +var oldStart = 
ConvenientHunk.prototype.oldStart; +/** + * The starting offset of the first old line in the file + * @return {Number} + */ +ConvenientHunk.prototype.oldStart = oldStart; + +var size = ConvenientHunk.prototype.size; +/** + * Number of lines in this hunk + * @return {Number} + */ +ConvenientHunk.prototype.size = size; diff --git a/lib/convenient_patch.js b/lib/convenient_patch.js index ed34735cb..1e825382b 100644 --- a/lib/convenient_patch.js +++ b/lib/convenient_patch.js @@ -1,124 +1,131 @@ -var git = require('../'), - DiffList = git.DiffList, - ConvenientHunk = require('./convenient_hunk').ConvenientHunk; +var NodeGit = require("../"); -function ConvenientPatch(raw) { - this.raw = raw; -} +var ConvenientPatch = NodeGit.ConvenientPatch; +var hunks = ConvenientPatch.prototype.hunks; /** - * Old name of the file - * @return {String} + * The hunks in this patch + * @async + * @return {Array} a promise that resolves to an array of + * ConvenientHunks */ -ConvenientPatch.prototype.oldFile = function() { - return this.raw.delta.oldFile(); -}; +ConvenientPatch.prototype.hunks = hunks; +var isAdded = ConvenientPatch.prototype.isAdded; /** - * New name of the file - * @return {String} + * Is this an added patch? + * @return {Boolean} */ -ConvenientPatch.prototype.newFile = function() { - return this.raw.delta.newFile(); -}; +ConvenientPatch.prototype.isAdded = isAdded; +var isConflicted = ConvenientPatch.prototype.isConflicted; /** - * The number of hunks in this patch - * @return {Number} + * Is this a conflicted patch? + * @return {Boolean} */ -ConvenientPatch.prototype.size = function() { - return this.raw.patch.size(); -}; +ConvenientPatch.prototype.isConflicted = isConflicted; +var isCopied = ConvenientPatch.prototype.isCopied; /** - * The hunks in this patch - * @return {[ConvenientHunk]} an array of ConvenientHunks + * Is this a copied patch? 
+ * @return {Boolean} */ -ConvenientPatch.prototype.hunks = function() { - var result = []; - for (var i = 0; i < this.size(); i++) - result.push(new ConvenientHunk(this.raw.patch, i)); - return result; -}; +ConvenientPatch.prototype.isCopied = isCopied; +var isDeleted = ConvenientPatch.prototype.isDeleted; /** - * The status of this patch (unmodified, added, deleted) - * @return {Number} + * Is this a deleted patch? + * @return {Boolean} */ -ConvenientPatch.prototype.status = function() { - return this.raw.delta.status(); -}; +ConvenientPatch.prototype.isDeleted = isDeleted; +var isIgnored = ConvenientPatch.prototype.isIgnored; /** - * Is this an unmodified patch? + * Is this an ignored patch? * @return {Boolean} */ -ConvenientPatch.prototype.isUnmodified = function() { - return this.status() == DiffList.Delta.Unmodified; -}; +ConvenientPatch.prototype.isIgnored = isIgnored; +var isModified = ConvenientPatch.prototype.isModified; /** - * Is this an added patch? + * Is this an modified patch? * @return {Boolean} */ -ConvenientPatch.prototype.isAdded = function() { - return this.status() == DiffList.Delta.Added; -}; +ConvenientPatch.prototype.isModified = isModified; +var isRenamed = ConvenientPatch.prototype.isRenamed; /** - * Is this a deleted patch? + * Is this a renamed patch? * @return {Boolean} */ -ConvenientPatch.prototype.isDeleted = function() { - return this.status() == DiffList.Delta.Deleted; -}; +ConvenientPatch.prototype.isRenamed = isRenamed; +var isTypeChange = ConvenientPatch.prototype.isTypeChange; /** - * Is this an modified patch? + * Is this a type change? * @return {Boolean} */ -ConvenientPatch.prototype.isModified = function() { - return this.status() == DiffList.Delta.Modified; -}; +ConvenientPatch.prototype.isTypeChange = isTypeChange; +var isUnmodified = ConvenientPatch.prototype.isUnmodified; /** - * Is this a renamed patch? + * Is this an unmodified patch? 
* @return {Boolean} */ -ConvenientPatch.prototype.isRenamed = function() { - return this.status() == DiffList.Delta.Renamed; -}; +ConvenientPatch.prototype.isUnmodified = isUnmodified; +var isUnreadable = ConvenientPatch.prototype.isUnreadable; /** - * Is this a copied patch? + * Is this an undreadable patch? * @return {Boolean} */ -ConvenientPatch.prototype.isCopied = function() { - return this.status() == DiffList.Delta.Copied; -}; +ConvenientPatch.prototype.isUnreadable = isUnreadable; +var isUntracked = ConvenientPatch.prototype.isUntracked; /** - * Is this an ignored patch? + * Is this an untracked patch? * @return {Boolean} */ -ConvenientPatch.prototype.isIgnored = function() { - return this.status() == DiffList.Delta.Ignored; -}; +ConvenientPatch.prototype.isUntracked = isUntracked; /** - * Is this an untracked patch? - * @return {Boolean} + * @typedef lineStats + * @type {Object} + * @property {number} total_context # of contexts in the patch + * @property {number} total_additions # of lines added in the patch + * @property {number} total_deletions # of lines deleted in the patch */ -ConvenientPatch.prototype.isUntracked = function() { - return this.status() == DiffList.Delta.Untracked; -}; +var lineStats = ConvenientPatch.prototype.lineStats; +/** + * The line statistics of this patch (#contexts, #added, #deleted) + * @return {lineStats} + */ +ConvenientPatch.prototype.lineStats = lineStats; +var newFile = ConvenientPatch.prototype.newFile; /** - * Is this a type change? 
- * @return {Boolean} + * New attributes of the file + * @return {DiffFile} + */ +ConvenientPatch.prototype.newFile = newFile; + +var oldFile = ConvenientPatch.prototype.oldFile; +/** + * Old attributes of the file + * @return {DiffFile} */ -ConvenientPatch.prototype.isTypeChange = function() { - return this.status() == DiffList.Delta.TypeChange; -}; +ConvenientPatch.prototype.oldFile = oldFile; -exports.ConvenientPatch = ConvenientPatch; +var size = ConvenientPatch.prototype.size; +/** + * The number of hunks in this patch + * @return {Number} + */ +ConvenientPatch.prototype.size = size; + +var status = ConvenientPatch.prototype.status; +/** + * The status of this patch (unmodified, added, deleted) + * @return {Number} + */ +ConvenientPatch.prototype.status = status; diff --git a/lib/diff.js b/lib/diff.js new file mode 100644 index 000000000..b393595f8 --- /dev/null +++ b/lib/diff.js @@ -0,0 +1,113 @@ +var NodeGit = require("../"); +var Diff = NodeGit.Diff; +var normalizeOptions = NodeGit.Utils.normalizeOptions; +var Patch = NodeGit.Patch; + +var _blobToBuffer = Diff.blobToBuffer; +var _indexToWorkdir = Diff.indexToWorkdir; +var _treeToIndex = Diff.treeToIndex; +var _treeToTree = Diff.treeToTree; +var _treeToWorkdir = Diff.treeToWorkdir; +var _treeToWorkdirWithIndex = Diff.treeToWorkdirWithIndex; + +var _findSimilar = Diff.prototype.findSimilar; + +/** + * Directly run a diff between a blob and a buffer. 
+ * @async + * @param {Blob} old_blob Blob for old side of diff, or NULL for empty blob + * @param {String} old_as_path Treat old blob as if it had this filename; + * can be NULL + * @param {String} buffer Raw data for new side of diff, or NULL for empty + * @param {String} buffer_as_path Treat buffer as if it had this filename; + * can be NULL + * @param {DiffOptions} opts Options for diff, or NULL for default options + * @param {Function} file_cb Callback for "file"; made once if there is a diff; + * can be NULL + * @param {Function} binary_cb Callback for binary files; can be NULL + * @param {Function} hunk_cb Callback for each hunk in diff; can be NULL + * @param {Function} line_cb Callback for each line in diff; can be NULL + */ +Diff.blobToBuffer= function( + old_blob, + old_as_path, + buffer, + buffer_as_path, + opts, + file_cb, + binary_cb, + hunk_cb, + line_cb) { + var bufferText; + var bufferLength; + if (buffer instanceof Buffer) { + bufferText = buffer.toString("utf8"); + bufferLength = Buffer.byteLength(buffer, "utf8"); + } else { + bufferText = buffer; + bufferLength = !buffer ? 
0 : Buffer.byteLength(buffer, "utf8"); + } + + opts = normalizeOptions(opts, NodeGit.DiffOptions); + + return _blobToBuffer.call( + this, + old_blob, + old_as_path, + bufferText, + bufferLength, + buffer_as_path, + opts, + file_cb, + binary_cb, + hunk_cb, + line_cb, + null); +}; + +// Override Diff.indexToWorkdir to normalize opts +Diff.indexToWorkdir = function(repo, index, opts) { + opts = normalizeOptions(opts, NodeGit.DiffOptions); + return _indexToWorkdir(repo, index, opts); +}; + +// Override Diff.treeToIndex to normalize opts +Diff.treeToIndex = function(repo, tree, index, opts) { + opts = normalizeOptions(opts, NodeGit.DiffOptions); + return _treeToIndex(repo, tree, index, opts); +}; + +// Override Diff.treeToTree to normalize opts +Diff.treeToTree = function(repo, from_tree, to_tree, opts) { + opts = normalizeOptions(opts, NodeGit.DiffOptions); + return _treeToTree(repo, from_tree, to_tree, opts); +}; + +// Override Diff.treeToWorkdir to normalize opts +Diff.treeToWorkdir = function(repo, tree, opts) { + opts = normalizeOptions(opts, NodeGit.DiffOptions); + return _treeToWorkdir(repo, tree, opts); +}; + +// Override Diff.treeToWorkdir to normalize opts +Diff.treeToWorkdirWithIndex = function(repo, tree, opts) { + opts = normalizeOptions(opts, NodeGit.DiffOptions); + return _treeToWorkdirWithIndex(repo, tree, opts); +}; + +// Override Diff.findSimilar to normalize opts +Diff.prototype.findSimilar = function(opts) { + opts = normalizeOptions(opts, NodeGit.DiffFindOptions); + return _findSimilar.call(this, opts); +}; + +/** + * Retrieve patches in this difflist + * + * @async + * @return {Array} a promise that resolves to an array of + * ConvenientPatches + */ +Diff.prototype.patches = function() { + return Patch.convenientFromDiff(this); +}; diff --git a/lib/diff_file.js b/lib/diff_file.js new file mode 100644 index 000000000..34ce315ce --- /dev/null +++ b/lib/diff_file.js @@ -0,0 +1,38 @@ +var NodeGit = require("../"); + +var DiffFile = NodeGit.DiffFile; + 
+var flags = DiffFile.prototype.flags; +/** + * Returns the file's flags + * @return {Number} + */ +DiffFile.prototype.flags = flags; + +var id = DiffFile.prototype.id; +/** + * Returns the file's Oid + * @return {Oid} + */ +DiffFile.prototype.id = id; + +var mode = DiffFile.prototype.mode; +/** + * Returns the file's mode + * @return {Number} + */ +DiffFile.prototype.mode = mode; + +var path = DiffFile.prototype.path; +/** + * Returns the file's path + * @return {String} + */ +DiffFile.prototype.path = path; + +var size = DiffFile.prototype.size; +/** + * Returns the file's size + * @return {Number} + */ +DiffFile.prototype.size = size; diff --git a/lib/diff_line.js b/lib/diff_line.js new file mode 100644 index 000000000..83e434401 --- /dev/null +++ b/lib/diff_line.js @@ -0,0 +1,32 @@ +var NodeGit = require("../"); +var DiffLine = NodeGit.DiffLine; + +var _rawContent = DiffLine.prototype.content; + +/** +* The relevant line +* @return {String} +*/ +DiffLine.prototype.content = function() { + if (!this._cache) { + this._cache = {}; + } + + if (!this._cache.content) { + this._cache.content = new Buffer(this.rawContent()) + .slice(0, this.contentLen()) + .toString("utf8"); + } + + return this._cache.content; +}; + +/** +* The non utf8 translated text +* @return {String} +*/ +DiffLine.prototype.rawContent = function() { + return _rawContent.call(this); +}; + +NodeGit.DiffLine = DiffLine; diff --git a/lib/diff_list.js b/lib/diff_list.js deleted file mode 100644 index 5cc125085..000000000 --- a/lib/diff_list.js +++ /dev/null @@ -1,53 +0,0 @@ -var git = require('../'), - DiffList = git.DiffList, - ConvenientPatch = require('./convenient_patch').ConvenientPatch, - events = require('events'); - -/** - * Refer to vendor/libgit2/include/git2/diff.h for delta type definitions. 
- * - * @readonly - * @enum {Integer} - */ -DiffList.Delta = { - /** 0 */ Unmodified: 0, - /** 1 */ Added: 1, - /** 2 */ Deleted: 2, - /** 3 */ Modified: 3, - /** 4 */ Renamed: 4, - /** 5 */ Copied: 5, - /** 6 */ Ignored: 6, - /** 7 */ Untracked: 7, - /** 8 */ TypeChange: 8 -}; - -/** - * Refer to vendor/libgit2/include/git2/diff.h for line origin type definitions. - * - * @readOnly - * @enum {String} - */ -DiffList.LineOrigin = { - /** ' ' */ Context: 32, - /** '+' */ Addition: 43, - /** '-' */ Deletion: 45, - /** '\n' */ AddEofNl: 13, - /** '' */ DelEofNl: 0, - /** 'F' */ FileHdr: 106, - /** 'H' */ HunkHdr: 110, - /** 'B' */ Binary: 102 -}; - -/** - * Retrieve patches in this difflist - * - * @return {[ConvenientPatch]} an array of ConvenientPatches - */ -DiffList.prototype.patches = function() { - var size = this.size(); - result = []; - for (var i = 0; i < size; i++) { - result.push(new ConvenientPatch(this.patch(i))); - } - return result; -}; diff --git a/lib/index.js b/lib/index.js index 6dd690e54..cb87784d3 100644 --- a/lib/index.js +++ b/lib/index.js @@ -1,15 +1,34 @@ -var git = require('../'), - Index = git.Index; +var NodeGit = require("../"); + +var Index = NodeGit.Index; + +var _addAll = Index.prototype.addAll; +var _removeAll = Index.prototype.removeAll; +var _updateAll = Index.prototype.updateAll; + +Index.prototype.addAll = function(pathspec, flags, matchedCallback) { + return _addAll.call(this, pathspec || "*", flags, matchedCallback, null); +}; /** * Return an array of the entries in this index. 
- * @return {[IndexEntry]} an array of IndexEntrys + * @return {Array} an array of IndexEntrys */ Index.prototype.entries = function() { - var size = this.size(), - result = []; + var size = this.entryCount(); + var result = []; + for (var i = 0; i < size; i++) { - result.push(this.entry(i)); + result.push(this.getByIndex(i)); } + return result; }; + +Index.prototype.removeAll = function(pathspec, matchedCallback) { + return _removeAll.call(this, pathspec || "*", matchedCallback, null); +}; + +Index.prototype.updateAll = function(pathspec, matchedCallback) { + return _updateAll.call(this, pathspec || "*", matchedCallback, null); +}; diff --git a/lib/merge.js b/lib/merge.js new file mode 100644 index 000000000..e9b9b5c18 --- /dev/null +++ b/lib/merge.js @@ -0,0 +1,47 @@ +var NodeGit = require("../"); +var normalizeOptions = NodeGit.Utils.normalizeOptions; + +var Merge = NodeGit.Merge; +var _commits = Merge.commits; +var _merge = Merge.merge; + +/** + * Merge 2 commits together and create an new index that can + * be used to create a merge commit. + * + * @param {Repository} repo Repository that contains the given commits + * @param {Commit} ourCommit The commit that reflects the destination tree + * @param {Commit} theirCommit The commit to merge into ourCommit + * @param {MergeOptions} [options] The merge tree options (null for default) + */ +Merge.commits = function(repo, ourCommit, theirCommit, options) { + options = normalizeOptions(options, NodeGit.MergeOptions); + + return Promise.all([ + repo.getCommit(ourCommit), + repo.getCommit(theirCommit) + ]).then(function(commits) { + return _commits.call(this, repo, commits[0], commits[1], options); + }); +}; + +/** + * Merge a commit into HEAD and writes the results to the working directory. 
+ * + * @param {Repository} repo Repository that contains the given commits + * @param {AnnotatedCommit} theirHead The annotated commit to merge into HEAD + * @param {MergeOptions} [mergeOpts] The merge tree options (null for default) + * @param {CheckoutOptions} [checkoutOpts] The checkout options + * (null for default) + */ +Merge.merge = function(repo, theirHead, mergeOpts, checkoutOpts) { + mergeOpts = normalizeOptions(mergeOpts || {}, NodeGit.MergeOptions); + checkoutOpts = normalizeOptions(checkoutOpts || {}, NodeGit.CheckoutOptions); + + // Even though git_merge takes an array of annotated_commits, it expects + // exactly one to have been passed in or it will throw an error... ¯\_(ツ)_/¯ + var theirHeads = [theirHead]; + + return _merge.call(this, repo, theirHeads, theirHeads.length, + mergeOpts, checkoutOpts); +}; diff --git a/lib/note.js b/lib/note.js new file mode 100644 index 000000000..8e46427cf --- /dev/null +++ b/lib/note.js @@ -0,0 +1,17 @@ +var NodeGit = require("../"); + +var Note = NodeGit.Note; + +var _foreach = Note.foreach; + +// Override Note.foreach to eliminate the need to pass null payload +Note.foreach = function(repo, notesRef, callback) { + function wrapperCallback(blobId, objectId) { + // We need to copy the OID since libgit2 types are getting cleaned up + // incorrectly right now in callbacks + + return callback(blobId.copy(), objectId.copy()); + } + + return _foreach(repo, notesRef, wrapperCallback, null); +}; diff --git a/lib/object.js b/lib/object.js index 64c774f84..85917987a 100644 --- a/lib/object.js +++ b/lib/object.js @@ -1,46 +1,35 @@ -var git = require('../'); +var NodeGit = require("../"); -git.Object.Type = { - Any: -2, /**< Object can be any of the following */ - Bad: -1, /**< Object is invalid. */ - Ext1: 0, /**< Reserved for future use. */ - Commit: 1, /**< A commit object. */ - Tree: 2, /**< A tree (directory listing) object. */ - Blob: 3, /**< A file revision object. */ - Tag: 4, /**< An annotated tag object. 
*/ - Ext2: 5, /**< Reserved for future use. */ - OffsetDelta: 6, /**< A delta, base is given by an offset. */ - OidDelta: 7 /**< A delta, base is given by object id. */ -}; +var Obj = NodeGit.Object; /** - * Is this object a commit? + * Is this object a blob? * @return {Boolean} */ -git.Object.prototype.isCommit = function() { - return this.type() == git.Object.Type.Commit; +Obj.prototype.isBlob = function() { + return this.type() == Obj.TYPE.BLOB; }; /** - * Is this object a tree? + * Is this object a commit? * @return {Boolean} */ -git.Object.prototype.isTree = function() { - return this.type() == git.Object.Type.Tree; +Obj.prototype.isCommit = function() { + return this.type() == Obj.TYPE.COMMIT; }; /** - * Is this object a blob? + * Is this object a tag? * @return {Boolean} */ -git.Object.prototype.isBlob = function() { - return this.type() == git.Object.Type.Blob; +Obj.prototype.isTag = function() { + return this.type() == Obj.TYPE.TAG; }; /** - * Is this object a tag? + * Is this object a tree? * @return {Boolean} */ -git.Object.prototype.isTag = function() { - return this.type() == git.Object.Type.Tag; +Obj.prototype.isTree = function() { + return this.type() == Obj.TYPE.TREE; }; diff --git a/lib/odb.js b/lib/odb.js index 28213903d..8bbd15a62 100644 --- a/lib/odb.js +++ b/lib/odb.js @@ -1,13 +1,15 @@ -var git = require('../'), - util = require('./util.js'), - Odb = git.Odb; - -/** - * Retrieve the object identified by oid. 
- * - * @param {String|Oid} String sha or Oid - * @param {Function} callback - * @return {git.Object} a git odb object - */ -util.normalizeOid(Odb.prototype, 'read'); -util.makeSafe(Odb.prototype, 'read'); +var NodeGit = require("../"); + +var Odb = NodeGit.Odb; + +var _read = Odb.prototype.read; + +Odb.prototype.read = function(oid, callback) { + return _read.call(this, oid).then(function(odbObject) { + if (typeof callback === "function") { + callback(null, odbObject); + } + + return odbObject; + }, callback); +}; diff --git a/lib/odb_object.js b/lib/odb_object.js new file mode 100644 index 000000000..cd5995eb5 --- /dev/null +++ b/lib/odb_object.js @@ -0,0 +1,9 @@ +var NodeGit = require("../"); + +var OdbObject = NodeGit.OdbObject; + +OdbObject.prototype.toString = function(size) { + size = size || this.size(); + + return this.data().toBuffer(size).toString(); +}; diff --git a/lib/oid.js b/lib/oid.js index cb531d7d8..fcbf9b48f 100644 --- a/lib/oid.js +++ b/lib/oid.js @@ -1,14 +1,23 @@ -var git = require('../'), - Oid = git.Oid; - -/** - * The hex representation of the SHA - * @return {String} - */ -Oid.prototype.toString = function() { - return this.sha(); +var NodeGit = require("../"); + +var Oid = NodeGit.Oid; + +// Backwards compatibility. +Object.defineProperties(Oid.prototype, { + "allocfmt": { + value: Oid.prototype.tostrS, + enumerable: false + }, + "toString": { + value: Oid.prototype.tostrS, + enumerable: false + } +}); + +Oid.prototype.copy = function() { + return this.cpy(); // seriously??? 
}; Oid.prototype.inspect = function() { - return "[Oid " + this.sha() + "]"; + return "[Oid " + this.allocfmt() + "]"; }; diff --git a/lib/rebase.js b/lib/rebase.js new file mode 100644 index 000000000..22ca72c7d --- /dev/null +++ b/lib/rebase.js @@ -0,0 +1,85 @@ +var NodeGit = require("../"); +var Rebase = NodeGit.Rebase; +var normalizeOptions = NodeGit.Utils.normalizeOptions; +var shallowClone = NodeGit.Utils.shallowClone; + +var _init = Rebase.init; +var _open = Rebase.open; +/** + * Initializes a rebase + * @async + * @param {Repository} repo The repository to perform the rebase + * @param {AnnotatedCommit} branch The terminal commit to rebase, or NULL to + * rebase the current branch + * @param {AnnotatedCommit} upstream The commit to begin rebasing from, or NULL + * to rebase all reachable commits + * @param {AnnotatedCommit} onto The branch to rebase onto, or NULL to rebase + * onto the given upstream + * @param {RebaseOptions} options Options to specify how rebase is performed, + * or NULL + * @param {Function} callback + * @return {Remote} + */ + +function defaultRebaseOptions(options, checkoutStrategy) { + var checkoutOptions; + var mergeOptions; + + if (options) { + options = shallowClone(options); + checkoutOptions = options.checkoutOptions; + mergeOptions = options.mergeOptions; + delete options.checkoutOptions; + delete options.mergeOptions; + + options = normalizeOptions(options, NodeGit.RebaseOptions); + } else { + options = normalizeOptions({}, NodeGit.RebaseOptions); + if (checkoutStrategy) { + checkoutOptions = { + checkoutStrategy: checkoutStrategy + }; + } + } + + if (checkoutOptions) { + options.checkoutOptions = normalizeOptions( + checkoutOptions, + NodeGit.CheckoutOptions + ); + } + + if (mergeOptions) { + options.mergeOptions = normalizeOptions( + mergeOptions, + NodeGit.MergeOptions + ); + } + + return options; +} + +Rebase.init = function(repository, branch, upstream, onto, options) { + options = defaultRebaseOptions( + options, + 
NodeGit.Checkout.STRATEGY.FORCE + ); + return _init(repository, branch, upstream, onto, options); +}; + +/** + * Opens an existing rebase that was previously started by either an invocation + * of Rebase.open or by another client. + * @async + * @param {Repository} repo The repository that has a rebase in-progress + * @param {RebaseOptions} options Options to specify how rebase is performed + * @param {Function} callback + * @return {Remote} + */ +Rebase.open = function(repository, options) { + options = defaultRebaseOptions( + options, + NodeGit.Checkout.STRATEGY.SAFE + ); + return _open(repository, options); +}; diff --git a/lib/reference.js b/lib/reference.js index ce909cc0a..8cfdded6c 100644 --- a/lib/reference.js +++ b/lib/reference.js @@ -1,51 +1,59 @@ -var git = require('../'), - Reference = git.Reference; +var NodeGit = require("../"); +var LookupWrapper = NodeGit.Utils.lookupWrapper; -var oldSymbolicTarget = Reference.prototype.symbolicTarget, - oldTarget = Reference.prototype.target; +var Reference = NodeGit.Reference; +var Branch = NodeGit.Branch; -Reference.Type = { - Oid: 1, - Symbolic: 2, - All: 3 -}; +/** +* Retrieves the reference by it's short name +* @async +* @param {Repository} repo The repo that the reference lives in +* @param {String|Reference} id The reference to lookup +* @param {Function} callback +* @return {Reference} +*/ +Reference.dwim = LookupWrapper(Reference, Reference.dwim); + +/** +* Retrieves the reference pointed to by the oid +* @async +* @param {Repository} repo The repo that the reference lives in +* @param {String|Reference} id The reference to lookup +* @param {Function} callback +* @return {Reference} +*/ +Reference.lookup = LookupWrapper(Reference); /** * Returns true if this reference is not symbolic * @return {Boolean} */ Reference.prototype.isConcrete = function() { - return this.type() == Reference.Type.Oid; + return this.type() == Reference.TYPE.OID; }; /** - * Returns true if this reference is symbolic - * @return 
{Boolean} + * Returns if the ref is pointed at by HEAD + * @return {bool} */ -Reference.prototype.isSymbolic = function() { - return this.type() == Reference.Type.Symbolic; +Reference.prototype.isHead = function() { + return Branch.isHead(this); }; /** - * Returns the target of this symbolic reference. - * @return {Reference} - * @throws if the target is not symbolic. + * Returns true if this reference is symbolic + * @return {Boolean} */ -Reference.prototype.symbolicTarget = function() { - if (!this.isSymbolic()) throw this.name() + " is not symbolic"; - - return oldSymbolicTarget.call(this); +Reference.prototype.isSymbolic = function() { + return this.type() == Reference.TYPE.SYMBOLIC; }; /** - * Returns the oid of this non-symbolic reference. - * @return {Oid} - * @throws if the target is symbolic. + * Returns true if this reference is valid + * @return {Boolean} */ -Reference.prototype.target = function() { - if (!this.isConcrete()) throw this.name() + " is symbolic"; - - return oldTarget.call(this); +Reference.prototype.isValid = function() { + return this.type() != Reference.TYPE.INVALID; }; /** @@ -54,4 +62,4 @@ Reference.prototype.target = function() { */ Reference.prototype.toString = function() { return this.name(); -} \ No newline at end of file +}; diff --git a/lib/remote.js b/lib/remote.js new file mode 100644 index 000000000..4cd7e43a4 --- /dev/null +++ b/lib/remote.js @@ -0,0 +1,134 @@ +var NodeGit = require("../"); +var normalizeOptions = NodeGit.Utils.normalizeOptions; +var lookupWrapper = NodeGit.Utils.lookupWrapper; +var shallowClone = NodeGit.Utils.shallowClone; + +var Remote = NodeGit.Remote; +var _connect = Remote.prototype.connect; +var _download = Remote.prototype.download; +var _fetch = Remote.prototype.fetch; +var _push = Remote.prototype.push; + +/** + * Retrieves the remote by name + * @async + * @param {Repository} repo The repo that the remote lives in + * @param {String|Remote} name The remote to lookup + * @param {Function} callback 
+ * @return {Remote} + */ +Remote.lookup = lookupWrapper(Remote); + +/** + * Connects to a remote + * + * @async + * @param {Enums.DIRECTION} direction The direction for the connection + * @param {RemoteCallbacks} callbacks The callback functions for the connection + * @param {ProxyOptions} proxyOpts Proxy settings + * @param {Array} customHeaders extra HTTP headers to use + * @param {Function} callback + * @return {Number} error code + */ +Remote.prototype.connect = function( + direction, + callbacks, + proxyOpts, + customHeaders +) { + callbacks = normalizeOptions(callbacks, NodeGit.RemoteCallbacks); + proxyOpts = normalizeOptions(proxyOpts || {}, NodeGit.ProxyOptions); + customHeaders = customHeaders || []; + + return _connect.call(this, direction, callbacks, proxyOpts, customHeaders); +}; + +/** + * Connects to a remote + * + * @async + * @param {Array} refSpecs The ref specs that should be pushed + * @param {FetchOptions} opts The fetch options for download, contains callbacks + * @param {Function} callback + * @return {Number} error code + */ +Remote.prototype.download = function(refspecs, opts) { + var callbacks; + + if (opts) { + opts = shallowClone(opts); + callbacks = opts.callbacks; + delete opts.callbacks; + } else { + opts = {}; + } + + opts = normalizeOptions(opts, NodeGit.FetchOptions); + + if (callbacks) { + opts.callbacks = + normalizeOptions(callbacks, NodeGit.RemoteCallbacks); + } + + return _download.call(this, refspecs, opts); +}; + +/** + * Connects to a remote + * + * @async + * @param {Array} refSpecs The ref specs that should be pushed + * @param {FetchOptions} opts The fetch options for download, contains callbacks + * @param {String} message The message to use for the update reflog messages + * @param {Function} callback + * @return {Number} error code + */ +Remote.prototype.fetch = function(refspecs, opts, reflog_message) { + var callbacks; + + if (opts) { + opts = shallowClone(opts); + callbacks = opts.callbacks; + delete 
opts.callbacks; + } else { + opts = {}; + } + + opts = normalizeOptions(opts, NodeGit.FetchOptions); + + if (callbacks) { + opts.callbacks = + normalizeOptions(callbacks, NodeGit.RemoteCallbacks); + } + + return _fetch.call(this, refspecs, opts, reflog_message); +}; + +/** + * Pushes to a remote + * + * @async + * @param {Array} refSpecs The ref specs that should be pushed + * @param {PushOptions} options Options for the checkout + * @param {Function} callback + * @return {Number} error code + */ +Remote.prototype.push = function(refSpecs, opts) { + var callbacks; + if (opts) { + opts = shallowClone(opts); + callbacks = opts.callbacks; + delete opts.callbacks; + } else { + opts = {}; + } + + opts = normalizeOptions(opts, NodeGit.PushOptions); + + if (callbacks) { + opts.callbacks = + normalizeOptions(callbacks, NodeGit.RemoteCallbacks); + } + + return _push.call(this, refSpecs, opts); +}; diff --git a/lib/repo.js b/lib/repo.js deleted file mode 100644 index 199d95fff..000000000 --- a/lib/repo.js +++ /dev/null @@ -1,220 +0,0 @@ -var git = require('../'), - util = require('./util.js'), - Repo = git.Repo, - Tree = git.Tree, - TreeBuilder = git.TreeBuilder, - Reference = git.Reference; - -var oldGetReference = Repo.prototype.getReference, - oldGetCommit = Repo.prototype.getCommit, - oldBlob = Repo.prototype.getBlob, - oldGetTree = Repo.prototype.getTree, - oldGetTag = Repo.prototype.getTag, - oldCreateRevWalk = Repo.prototype.createRevWalk, - oldCreateCommit = Repo.prototype.createCommit, - oldCreateBlobFromBuffer = Repo.prototype.createBlobFromBuffer; - -/** - * Look up a branch's most recent commit. - * - * @param {String} name Branch name, e.g. 
'master' - * @param {Function} callback - * @return {Branch} - */ -Repo.prototype.getBranch = function(name, callback) { - var self = this; - this.getReference('refs/heads/' + name, function referenceLookupCallback(error, reference) { - if (error) return callback(error); - - self.getCommit(reference.target(), function commitLookupCallback(error, commit) { - if (error) return callback(error); - - callback(null, commit); - }); - }); -}; -util.makeSafe(Repo.prototype, 'getBranch'); - -/** - * Lookup the reference with the given name. - * - * @param {String} name - * @param {Function} callback - * @return {Reference} - */ -Repo.prototype.getReference = function(name, callback) { - var self = this; - oldGetReference.call(this, name, function(error, reference) { - if (error) return callback(error); - - if (reference.type() == Reference.Type.Symbolic) { - reference.resolve(function (error, reference) { - if (error) return callback(error); - reference.repo = self; - callback(null, reference); - }); - } else { - reference.repo = self; - callback(null, reference); - } - }); -}; -util.makeSafe(Repo.prototype, 'getReference'); - -/** - * Retrieve the commit identified by oid. - * - * @param {String|Oid} String sha or Oid - * @param {Function} callback - * @return {Commit} - */ -Repo.prototype.getCommit = function(oid, callback) { - var self = this; - oldGetCommit.call(this, oid, function(error, commit) { - if (error) return callback(error); - commit.repo = self; - callback(null, commit); - }); -}; -util.normalizeOid(Repo.prototype, 'getCommit'); -util.makeSafe(Repo.prototype, 'getCommit'); - -/** - * Retrieve the blob represented by the oid. 
- * - * @param {String|Oid} String sha or Oid - * @param {Function} callback - * @return {Blob} - */ -Repo.prototype.getBlob = function(oid, callback) { - var self = this; - oldBlob.call(this, oid, function(error, blob) { - if (error) return callback(error); - blob.repo = self; - callback(null, blob); - }); -}; -util.normalizeOid(Repo.prototype, 'getBlob'); -util.makeSafe(Repo.prototype, 'getBlob'); - -/** - * Retrieve the tree represented by the oid. - * - * @param {String|Oid} String sha or Oid - * @param {Function} callback - * @return {Tree} - */ -Repo.prototype.getTree = function(oid, callback) { - var self = this; - oldGetTree.call(this, oid, function(error, tree) { - if (error) return callback(error); - tree.repo = self; - callback(null, tree); - }); -}; -util.normalizeOid(Repo.prototype, 'getTree'); -util.makeSafe(Repo.prototype, 'getTree'); - -/** - * Retrieve the tag represented by the oid. - * - * @param {String|Oid} String sha or Oid - * @param {Function} callback - * @return {Tag} - */ -Repo.prototype.getTag = function(oid, callback) { - var self = this; - oldGetTag.call(this, oid, callback); -}; -util.normalizeOid(Repo.prototype, 'getTag'); -util.makeSafe(Repo.prototype, 'getTag'); - -/** - * Instantiate a new revision walker for browsing the Repo's history. - * See also `Commit.prototype.history()` - * - * @param {String|Oid} String sha or Oid - * @param {Function} callback - * @return {RevWalk} - */ -Repo.prototype.createRevWalk = function() { - var revWalk = oldCreateRevWalk.call(this); - revWalk.repo = this; - return revWalk; -}; - -/** - * Retrieve the master branch. 
- * - * @param {Function} callback - * @return {Branch} - */ -Repo.prototype.getMaster = function(callback) { - this.getBranch('master', callback); -}; - -/** - * Create a commit - * - * @param {String} updateRef - * @param {Signature} author - * @param {Signature} commiter - * @param {String} message - * @param {Tree|Oid|String} Tree - * @param {Array} parents - * @param {Function} callback - * @return {Oid} The oid of the commit - */ -Repo.prototype.createCommit = function(updateRef, author, committer, message, tree, parents, callback) { - if (tree instanceof Tree) { - oldCreateCommit.call( - this, - updateRef, - author, - committer, - null /* use default message encoding */, - message, - tree, - parents.length, parents, - callback); - } else { - var self = this; - this.getTree(tree, function(error, tree) { - if (error) return callback(error); - oldCreateCommit.call( - self, - updateRef, - author, - committer, - null /* use default message encoding */, - message, - tree, - parents.length, parents, - callback); - }); - } -}; - -/** - * Create a blob from a buffer - * - * @param {Buffer} buffer - * @param {Function} callback - * @return {Blob} - */ -Repo.prototype.createBlobFromBuffer = function(buffer, callback) { - oldCreateBlobFromBuffer.call(this, buffer, buffer.length, callback); -}; - -/** - * Create a new tree builder. 
- * - * @param {Tree} tree - * @param {Function} callback - */ -Repo.prototype.treeBuilder = function(callback) { - var builder = TreeBuilder.create(null); - builder.root = builder; - builder.repo = this; - return builder; -}; \ No newline at end of file diff --git a/lib/repository.js b/lib/repository.js new file mode 100644 index 000000000..87d000f40 --- /dev/null +++ b/lib/repository.js @@ -0,0 +1,1675 @@ +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var NodeGit = require("../"); +var Blob = NodeGit.Blob; +var Checkout = NodeGit.Checkout; +var Commit = NodeGit.Commit; +var normalizeOptions = NodeGit.Utils.normalizeOptions; +var shallowClone = NodeGit.Utils.shallowClone; +var path = require("path"); +var Reference = NodeGit.Reference; +var Remote = NodeGit.Remote; +var Repository = NodeGit.Repository; +var Revwalk = NodeGit.Revwalk; +var Status = NodeGit.Status; +var StatusFile = NodeGit.StatusFile; +var StatusList = NodeGit.StatusList; +var Submodule = NodeGit.Submodule; +var Tag = NodeGit.Tag; +var Tree = NodeGit.Tree; +var TreeBuilder = NodeGit.Treebuilder; + +var _discover = Repository.discover; +var _initExt = Repository.initExt; +var _fetchheadForeach = Repository.prototype.fetchheadForeach; +var _mergeheadForeach = Repository.prototype.mergeheadForeach; + +function applySelectedLinesToTarget + (originalContent, newLines, pathHunks, isStaged, reverse) { + // 43: ascii code for '+' + // 45: ascii code for '-' + var lineTypes = { + ADDED: !reverse ? 43 : 45, + DELETED: !reverse ? 
45 : 43 + }; + var newContent = ""; + var oldIndex = 0; + var linesPromises = []; + + // split the original file into lines + var oldLines = originalContent.toString().split("\n"); + + // if no selected lines were sent, return the original content + if (!newLines || newLines.length === 0) { + return originalContent; + } + + function lineEqualsFirstNewLine(hunkLine) { + return ((hunkLine.oldLineno() === newLines[0].oldLineno()) && + (hunkLine.newLineno() === newLines[0].newLineno())); + } + + function processSelectedLine(hunkLine) { + // if this hunk line is a selected line find the selected line + var newLine = newLines.filter(function(nLine) { + return ((hunkLine.oldLineno() === nLine.oldLineno()) && + (hunkLine.newLineno() === nLine.newLineno())); + }); + + if (hunkLine.content().indexOf("\\ No newline at end of file") !== -1) { + return false; + } + + // determine what to add to the new content + if ((isStaged && newLine && newLine.length > 0) || + (!isStaged && (!newLine || newLine.length === 0))) { + if (hunkLine.origin() !== lineTypes.ADDED) { + newContent += hunkLine.content(); + } + if ((isStaged && hunkLine.origin() !== lineTypes.DELETED) || + (!isStaged && hunkLine.origin() !== lineTypes.ADDED)) { + oldIndex++; + } + } + else { + switch (hunkLine.origin()) { + case lineTypes.ADDED: + newContent += hunkLine.content(); + if (isStaged) { + oldIndex++; + } + break; + case lineTypes.DELETED: + if (!isStaged) { + oldIndex++; + } + break; + default: + newContent += oldLines[oldIndex++]; + if (oldIndex < oldLines.length) { + newContent += "\n"; + } + break; + } + } + } + + // find the affected hunk + pathHunks.forEach(function(pathHunk) { + linesPromises.push(pathHunk.lines()); + }); + + return Promise.all(linesPromises).then(function(results) { + for (var i = 0; i < results.length && newContent.length < 1; i++) { + var hunkStart = isStaged || reverse ? 
pathHunks[i].newStart() + : pathHunks[i].oldStart(); + var lines = results[i]; + if (lines.filter(lineEqualsFirstNewLine).length > 0) { + // add content that is before the hunk + while (hunkStart > (oldIndex + 1)) { + newContent += oldLines[oldIndex++] + "\n"; + } + + // modify the lines of the hunk according to the selection + lines.forEach(processSelectedLine); + + // add the rest of the file + while (oldLines.length > oldIndex) { + newContent += oldLines[oldIndex++] + + (oldLines.length > oldIndex ? "\n" : ""); + } + } + } + return newContent; + }); +} + +function getPathHunks(repo, index, filePath, isStaged, additionalDiffOptions) { + var diffOptions = additionalDiffOptions ? { + flags: additionalDiffOptions + } : undefined; + + return Promise.resolve() + .then(function() { + if (isStaged) { + return repo.getHeadCommit() + .then(function getTreeFromCommit(commit) { + return commit.getTree(); + }) + .then(function getDiffFromTree(tree) { + return NodeGit.Diff.treeToIndex(repo, tree, index, diffOptions); + }); + } + + return NodeGit.Diff.indexToWorkdir(repo, index, { + flags: + NodeGit.Diff.OPTION.SHOW_UNTRACKED_CONTENT | + NodeGit.Diff.OPTION.RECURSE_UNTRACKED_DIRS | + (additionalDiffOptions || 0) + }); + }) + .then(function(diff) { + if (!(NodeGit.Status.file(repo, filePath) & + NodeGit.Status.STATUS.WT_MODIFIED) && + !(NodeGit.Status.file(repo, filePath) & + NodeGit.Status.STATUS.INDEX_MODIFIED)) { + return Promise.reject + ("Selected staging is only available on modified files."); + } + + return diff.patches(); + }) + .then(function(patches) { + var pathPatch = patches.filter(function(patch) { + return patch.newFile().path() === filePath; + }); + + if (pathPatch.length !== 1) { + return Promise.reject("No differences found for this file."); + } + + return pathPatch[0].hunks(); + }); +} + +/** + * Goes through a rebase's rebase operations and commits them if there are + * no merge conflicts + * + * @param {Repository} repository The repository that the rebase 
is being + * performed in + * @param {Rebase} rebase The current rebase being performed + * @param {Signature} signature Identity of the one performing the rebase + * @param {Function} beforeNextFn Callback to be called before each + * invocation of next(). If the callback + * returns a promise, the next() will be + * called when the promise resolves. + * @return {Int|Index} An error code for an unsuccesful rebase or an index for + * a rebase with conflicts + */ +function performRebase(repository, rebase, signature, beforeNextFn) { + var beforeNextFnResult; + + function getPromise() { + return rebase.next() + .then(function() { + return repository.refreshIndex() + .then(function(index) { + if (index.hasConflicts()) { + throw index; + } + + rebase.commit(null, signature); + + return performRebase(repository, rebase, signature, beforeNextFn); + }); + }, function(error) { + if (error && error.errno === NodeGit.Error.CODE.ITEROVER) { + return rebase.finish(signature); + } else { + throw error; + } + }); + } + + if(beforeNextFn) { + beforeNextFnResult = beforeNextFn(rebase); + // if beforeNextFn returns a promise, chain the promise + return Promise.resolve(beforeNextFnResult) + .then(getPromise); + } + + return getPromise(); +} + +/** + * Creates a branch with the passed in name pointing to the commit + * + * @async + * @param {String} startPath The base path where the lookup starts. + * @param {Number} acrossFs If non-zero, then the lookup will not stop when a + filesystem device change is detected while exploring + parent directories. + * @param {String} ceilingDirs A list of absolute symbolic link free paths. + the search will stop if any of these paths + are hit. 
This may be set to null + * @return {String} Path of the git repository + */ +Repository.discover = function(startPath, acrossFs, ceilingDirs, callback) { + return _discover(startPath, acrossFs, ceilingDirs) + .then(function(foundPath) { + foundPath = path.resolve(foundPath); + if (typeof callback === "function") { + callback(null, foundPath); + } + return foundPath; + }, callback); +}; + +// Override Repository.initExt to normalize initoptions +Repository.initExt = function(repo_path, opts) { + opts = normalizeOptions(opts, NodeGit.RepositoryInitOptions); + return _initExt(repo_path, opts); +}; + + +Repository.getReferences = function(repo, type, refNamesOnly, callback) { + return Reference.list(repo).then(function(refList) { + var refFilterPromises = []; + var filteredRefs = []; + + refList.forEach(function(refName) { + refFilterPromises.push(Reference.lookup(repo, refName) + .then(function(ref) { + if (type == Reference.TYPE.LISTALL || ref.type() == type) { + if (refNamesOnly) { + filteredRefs.push(refName); + return; + } + + if (ref.isSymbolic()) { + return ref.resolve().then(function(resolvedRef) { + resolvedRef.repo = repo; + + filteredRefs.push(resolvedRef); + }) + .catch(function() { + // If we can't resolve the ref then just ignore it. 
+ }); + } + else { + filteredRefs.push(ref); + } + } + }) + ); + }); + + return Promise.all(refFilterPromises).then(function() { + if (typeof callback === "function") { + callback(null, filteredRefs); + } + return filteredRefs; + }, callback); + }); +}; + +/** + * This will set the HEAD to point to the local branch and then attempt + * to update the index and working tree to match the content of the + * latest commit on that branch + * + * @async + * @param {String|Reference} branch the branch to checkout + * @param {Object|CheckoutOptions} opts the options to use for the checkout + */ +Repository.prototype.checkoutBranch = function(branch, opts) { + var repo = this; + + return repo.getReference(branch) + .then(function(ref) { + if (!ref.isBranch()) { + return false; + } + return repo.checkoutRef(ref, opts); + }); +}; + +/** + * This will set the HEAD to point to the reference and then attempt + * to update the index and working tree to match the content of the + * latest commit on that reference + * + * @async + * @param {Reference} reference the reference to checkout + * @param {Object|CheckoutOptions} opts the options to use for the checkout + */ +Repository.prototype.checkoutRef = function(reference, opts) { + var repo = this; + opts = opts || {}; + + opts.checkoutStrategy = opts.checkoutStrategy || + (NodeGit.Checkout.STRATEGY.SAFE | + NodeGit.Checkout.STRATEGY.RECREATE_MISSING); + return repo.getReferenceCommit(reference.name()) + .then(function(commit) { + return commit.getTree(); + }) + .then(function(tree) { + return Checkout.tree(repo, tree, opts); + }) + .then(function() { + var name = reference.name(); + return repo.setHead(name); + }); +}; + +/** + * Continues an existing rebase + * + * @async + * @param {Signature} signature Identity of the one performing the rebase + * @param {Function} beforeNextFn Callback to be called before each step + * of the rebase. If the callback returns a + * promise, the rebase will resume when the + * promise resolves. 
The rebase object is + * is passed to the callback. + * @return {Oid|Index} A commit id for a succesful merge or an index for a + * rebase with conflicts + */ +Repository.prototype.continueRebase = function(signature, beforeNextFn) { + var repo = this; + + signature = signature || repo.defaultSignature(); + + return repo.refreshIndex() + .then(function(index) { + if (index.hasConflicts()) { + throw index; + } + + return NodeGit.Rebase.open(repo); + }) + .then(function(rebase) { + rebase.commit(null, signature); + + return performRebase(repo, rebase, signature, beforeNextFn); + }) + .then(function(error) { + if (error) { + throw error; + } + + return repo.getBranchCommit("HEAD"); + }); +}; + +/** + * Creates a branch with the passed in name pointing to the commit + * + * @async + * @param {String} name Branch name, e.g. "master" + * @param {Commit|String|Oid} commit The commit the branch will point to + * @param {bool} force Overwrite branch if it exists + * @param {Signature} signature Identity to use to populate reflog + * @param {String} logMessage One line message to be appended to the reflog + * @return {Reference} + */ +Repository.prototype.createBranch = function(name, commit, force) { + var repo = this; + + if (commit instanceof Commit) { + return NodeGit.Branch.create( + repo, + name, + commit, + force ? 1 : 0); + } + else { + return repo.getCommit(commit).then(function(commit) { + return NodeGit.Branch.create( + repo, + name, + commit, + force ? 
1 : 0); + }); + } +}; + +/** + * Create a blob from a buffer + * + * @param {Buffer} buffer + * @return {Oid} + */ +Repository.prototype.createBlobFromBuffer = function(buffer, callback) { + return Blob.createFromBuffer(this, buffer, buffer.length, callback); +}; + +/** + * Create a commit + * + * @async + * @param {String} updateRef + * @param {Signature} author + * @param {Signature} committer + * @param {String} message + * @param {Tree|Oid|String} Tree + * @param {Array} parents + * @return {Oid} The oid of the commit + */ +Repository.prototype.createCommit = function( + updateRef, author, committer, message, tree, parents, callback) { + + var repo = this; + var promises = []; + + parents = parents || []; + + promises.push(repo.getTree(tree)); + + parents.forEach(function(parent) { + promises.push(repo.getCommit(parent)); + }); + + return Promise.all(promises).then(function(results) { + tree = results[0]; + + // Get the normalized values for our input into the function + var parentsLength = parents.length; + parents = []; + + for (var i = 0; i < parentsLength; i++) { + parents.push(results[i + 1]); + } + + return Commit.create( + repo, + updateRef, + author, + committer, + null /* use default message encoding */, + message, + tree, + parents.length, + parents + ); + }).then(function(commit) { + if (typeof callback === "function") { + callback(null, commit); + } + + return commit; + }, callback); +}; + +/** + * Creates a new commit on HEAD from the list of passed in files + * + * @async + * @param {Array} filesToAdd + * @param {Signature} author + * @param {Signature} committer + * @param {String} message + * @return {Oid} The oid of the new commit + */ +Repository.prototype.createCommitOnHead = function( + filesToAdd, + author, + committer, + message, + callback) { + + var repo = this; + + return repo.refreshIndex() + .then(function(index) { + if (!filesToAdd) { + filesToAdd = []; + } + + return filesToAdd + .reduce(function(lastFilePromise, filePath) { + 
return lastFilePromise + .then(function() { + return index.addByPath(filePath); + }); + }, Promise.resolve()) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(treeOid) { + return repo.getHeadCommit() + .then(function(parent) { + if (parent !== null) { // To handle a fresh repo with no commits + parent = [parent]; + } + return repo.createCommit( + "HEAD", + author, + committer, + message, + treeOid, + parent + ); + }); + }, callback); +}; + +/** + * Creates a new lightweight tag + * + * @async + * @param {String|Oid} String sha or Oid + * @param {String} name the name of the tag + * @return {Reference} + */ +Repository.prototype.createLightweightTag = function(oid, name, callback) { + var repository = this; + + return Commit.lookup(repository, oid) + .then(function(commit) { + // Final argument is `force` which overwrites any previous tag + return Tag.createLightweight(repository, name, commit, 0); + }) + .then(function() { + return Reference.lookup(repository, "refs/tags/" + name); + }); +}; + +/** + * Instantiate a new revision walker for browsing the Repository's history. 
+ * See also `Commit.prototype.history()` + * + * @param {String|Oid} String sha or Oid + * @return {RevWalk} + */ +Repository.prototype.createRevWalk = function() { + return Revwalk.create(this); +}; + +/** + * Creates a new annotated tag + * + * @async + * @param {String|Oid} String sha or Oid + * @param {String} name the name of the tag + * @param {String} message the description that will be attached to the + * annotated tag + * @return {Tag} + */ +Repository.prototype.createTag = function(oid, name, message, callback) { + var repository = this; + var signature = repository.defaultSignature(); + + return Commit.lookup(repository, oid) + .then(function(commit) { + // Final argument is `force` which overwrites any previous tag + return Tag.create(repository, name, commit, signature, message, 0); + }) + .then(function(tagOid) { + return repository.getTag(tagOid, callback); + }); +}; + +/** + * Gets the default signature for the default user and now timestamp + * @return {Signature} + */ +Repository.prototype.defaultSignature = function() { + var result = NodeGit.Signature.default(this); + + if (!result || !result.name()) { + result = NodeGit.Signature.now("unknown", "unknown@unknown.com"); + } + + return result; +}; + +/** + * Deletes a tag from a repository by the tag name. + * + * @async + * @param {String} Short or full tag name + */ +Repository.prototype.deleteTagByName = function(name) { + var repository = this; + + name = ~name.indexOf("refs/tags/") ? name.substr(10) : name; + + return Tag.delete(repository, name); +}; + +/** + * Discard line selection of a specified file. + * Assumes selected lines are unstaged. 
+ * + * @async + * @param {String} filePath The relative path of this file in the repo + * @param {Array} selectedLines The array of DiffLine objects + * selected for discarding + * @return {Number} 0 or an error code + */ +Repository.prototype.discardLines = + function(filePath, selectedLines, additionalDiffOptions) { + var repo = this; + var fullFilePath = path.join(repo.workdir(), filePath); + var index; + var originalContent; + + return repo.refreshIndex() + .then(function(indexResult) { + index = indexResult; + + return fse.readFile(fullFilePath, "utf8"); + }) + .then(function(content) { + originalContent = content; + + return getPathHunks(repo, index, filePath, false, additionalDiffOptions); + }) + .then(function(hunks) { + return applySelectedLinesToTarget( + originalContent, selectedLines, hunks, false, true + ); + }) + .then(function(newContent) { + return fse.writeFile(fullFilePath, newContent); + }); +}; + +/** + * Fetches from a remote + * + * @async + * @param {String|Remote} remote + * @param {Object|FetchOptions} fetchOptions Options for the fetch, includes + * callbacks for fetching + */ +Repository.prototype.fetch = function( + remote, + fetchOptions, + callback) +{ + var repo = this; + + function finallyFn(error) { + if (typeof callback === "function") { + callback(error); + } + } + + return repo.getRemote(remote) + .then(function(remote) { + return remote.fetch(null, fetchOptions, "Fetch from " + remote) + .then(function() { + return remote.disconnect(); + }); + }) + .then(finallyFn) + .catch(function(error) { + finallyFn(error); + throw error; + }); +}; + +/** + * Fetches from all remotes. This is done in series due to deadlocking issues + * with fetching from many remotes that can happen. 
+ * + * @async + * @param {Object|FetchOptions} fetchOptions Options for the fetch, includes + * callbacks for fetching + * @param {Function} callback + */ +Repository.prototype.fetchAll = function( + fetchOptions, + callback) +{ + var repo = this; + + function createCallbackWrapper(fn, remote) { + return function() { + var args = Array.prototype.slice.call(arguments); + args.push(remote); + + return fn.apply(this, args); + }.bind(this); + } + + fetchOptions = fetchOptions || {}; + + var remoteCallbacks = fetchOptions.callbacks || {}; + + var credentials = remoteCallbacks.credentials; + var certificateCheck = remoteCallbacks.certificateCheck; + var transferProgress = remoteCallbacks.transferProgress; + + return repo.getRemotes() + .then(function(remotes) { + return remotes.reduce(function(fetchPromise, remote) { + var wrappedFetchOptions = shallowClone(fetchOptions); + var wrappedRemoteCallbacks = shallowClone(remoteCallbacks); + + if (credentials) { + wrappedRemoteCallbacks.credentials = + createCallbackWrapper(credentials, remote); + } + + if (certificateCheck) { + wrappedRemoteCallbacks.certificateCheck = + createCallbackWrapper(certificateCheck, remote); + } + + if (transferProgress) { + wrappedRemoteCallbacks.transferProgress = + createCallbackWrapper(transferProgress, remote); + } + + wrappedFetchOptions.callbacks = wrappedRemoteCallbacks; + + return fetchPromise.then(function() { + return repo.fetch(remote, wrappedFetchOptions); + }); + }, Promise.resolve()); + }) + .then(function() { + if (typeof callback === "function") { + callback(); + } + }); +}; + +/** + * @async + * @param {FetchheadForeachCb} callback The callback function to be called on + * each entry + */ +Repository.prototype.fetchheadForeach = function(callback) { + return _fetchheadForeach.call(this, callback, null); +}; + +/** + * Retrieve the blob represented by the oid. 
+ * + * @async + * @param {String|Oid} String sha or Oid + * @return {Blob} + */ +Repository.prototype.getBlob = function(oid, callback) { + var repository = this; + + return Blob.lookup(repository, oid).then(function(blob) { + blob.repo = repository; + + if (typeof callback === "function") { + callback(null, blob); + } + + return blob; + }, callback); +}; + +/** +* Look up a branch. Alias for `getReference` +* +* @async +* @param {String|Reference} name Ref name, e.g. "master", "refs/heads/master" +* or Branch Ref +* @return {Reference} +*/ +Repository.prototype.getBranch = function(name, callback) { + return this.getReference(name, callback); +}; + +/** +* Look up a branch's most recent commit. Alias to `getReferenceCommit` +* +* @async +* @param {String|Reference} name Ref name, e.g. "master", "refs/heads/master" +* or Branch Ref +* @return {Commit} +*/ +Repository.prototype.getBranchCommit = function(name, callback) { + return this.getReferenceCommit(name, callback); +}; + +/** + * Retrieve the commit identified by oid. 
+ * + * @async + * @param {String|Oid} String sha or Oid + * @return {Commit} + */ +Repository.prototype.getCommit = function(oid, callback) { + var repository = this; + + return Commit.lookup(repository, oid).then(function(commit) { + commit.repo = repository; + + if (typeof callback === "function") { + callback(null, commit); + } + + return commit; + }, callback); +}; + +/** + * Gets the branch that HEAD currently points to + * Is an alias to head() + * + * @async + * @return {Reference} + */ +Repository.prototype.getCurrentBranch = function() { + return this.head(); +}; + +/** + * Retrieve the commit that HEAD is currently pointing to + * + * @async + * @return {Commit} + */ +Repository.prototype.getHeadCommit = function(callback) { + var repo = this; + + return Reference.nameToId(repo, "HEAD") + .then(function(head) { + return repo.getCommit(head, callback); + }) + .catch(function() { + return null; + }); +}; + +/** + * Retrieve the master branch commit. + * + * @async + * @return {Commit} + */ +Repository.prototype.getMasterCommit = function(callback) { + return this.getBranchCommit("master", callback); +}; + +/** + * Lookup the reference with the given name. + * + * @async + * @param {String|Reference} name Ref name, e.g. "master", "refs/heads/master" + * or Branch Ref + * @return {Reference} + */ +Repository.prototype.getReference = function(name, callback) { + var repository = this; + + return Reference.dwim(this, name).then(function(reference) { + if (reference.isSymbolic()) { + return reference.resolve().then(function(reference) { + reference.repo = repository; + + if (typeof callback === "function") { + callback(null, reference); + } + + return reference; + }, callback); + } else { + reference.repo = repository; + if (typeof callback === "function") { + callback(null, reference); + } + return reference; + } + }, callback); +}; + +/** + * Look up a refs's commit. + * + * @async + * @param {String|Reference} name Ref name, e.g. 
"master", "refs/heads/master" + * or Branch Ref + * @return {Commit} + */ +Repository.prototype.getReferenceCommit = function(name, callback) { + var repository = this; + + return this.getReference(name).then(function(reference) { + return repository.getCommit(reference.target()).then(function(commit) { + if (typeof callback === "function") { + callback(null, commit); + } + + return commit; + }); + }, callback); +}; + +/** + * Lookup reference names for a repository. + * + * @async + * @param {Reference.TYPE} type Type of reference to look up + * @return {Array} + */ +Repository.prototype.getReferenceNames = function(type, callback) { + return Repository.getReferences(this, type, true, callback); +}; + +/** + * Lookup references for a repository. + * + * @async + * @param {Reference.TYPE} type Type of reference to look up + * @return {Array} + */ +Repository.prototype.getReferences = function(type, callback) { + return Repository.getReferences(this, type, false, callback); +}; + +/** + * Gets a remote from the repo + * + * @async + * @param {String|Remote} remote + * @param {Function} callback + * @return {Remote} The remote object + */ +Repository.prototype.getRemote = function(remote, callback) { + if (remote instanceof NodeGit.Remote) { + return Promise.resolve(remote).then(function(remoteObj) { + if (typeof callback === "function") { + callback(null, remoteObj); + } + + return remoteObj; + }, callback); + } + + return NodeGit.Remote.lookup(this, remote).then(function(remoteObj) { + if (typeof callback === "function") { + callback(null, remoteObj); + } + + return remoteObj; + }, callback); +}; + +/** +* Lists out the remotes in the given repository. +* +* @async +* @param {Function} Optional callback +* @return {Object} Promise object. 
+*/ +Repository.prototype.getRemotes = function(callback) { + return Remote.list(this).then(function(remotes) { + if (typeof callback === "function") { + callback(null, remotes); + } + + return remotes; + }, callback); +}; + +/** + * Get the status of a repo to its working directory + * + * @async + * @param {obj} opts + * @return {Array} + */ +Repository.prototype.getStatus = function(opts) { + var statuses = []; + var statusCallback = function(path, status) { + statuses.push(new StatusFile({path: path, status: status})); + }; + + if (!opts) { + opts = { + flags: Status.OPT.INCLUDE_UNTRACKED | + Status.OPT.RECURSE_UNTRACKED_DIRS + }; + } + + return Status.foreachExt(this, opts, statusCallback).then(function() { + return statuses; + }); +}; + +/** + * Get extended statuses of a repo to its working directory. Status entries + * have `status`, `headToIndex` delta, and `indexToWorkdir` deltas + * + * @async + * @param {obj} opts + * @return {Array} + */ +Repository.prototype.getStatusExt = function(opts) { + var statuses = []; + + if (!opts) { + opts = { + flags: Status.OPT.INCLUDE_UNTRACKED | + Status.OPT.RECURSE_UNTRACKED_DIRS | + Status.OPT.RENAMES_INDEX_TO_WORKDIR | + Status.OPT.RENAMES_HEAD_TO_INDEX | + Status.OPT.RENAMES_FROM_REWRITES + }; + } + + return StatusList.create(this, opts) + .then(function(list) { + for (var i = 0; i < list.entrycount(); i++) { + var entry = Status.byIndex(list, i); + statuses.push(new StatusFile({entry: entry})); + } + + return statuses; + }); +}; + +/** + * Get the names of the submodules in the repository. + * + * @async + * @return {Array} + */ +Repository.prototype.getSubmoduleNames = function(callback) { + var names = []; + var submoduleCallback = function(submodule, name, payload) { + names.push(name); + }; + + return Submodule.foreach(this, submoduleCallback).then(function() { + if (typeof callback === "function") { + callback(null, names); + } + + return names; + }); +}; + +/** + * Retrieve the tag represented by the oid. 
+ * + * @async + * @param {String|Oid} String sha or Oid + * @return {Tag} + */ +Repository.prototype.getTag = function(oid, callback) { + var repository = this; + + return Tag.lookup(repository, oid).then(function(reference) { + reference.repo = repository; + + if (typeof callback === "function") { + callback(null, reference); + } + + return reference; + }, callback); +}; + +/** + * Retrieve the tag represented by the tag name. + * + * @async + * @param {String} Short or full tag name + * @return {Tag} + */ +Repository.prototype.getTagByName = function(name, callback) { + var repo = this; + + name = ~name.indexOf("refs/tags/") ? name : "refs/tags/" + name; + + return Reference.nameToId(repo, name).then(function(oid) { + return Tag.lookup(repo, oid).then(function(reference) { + reference.repo = repo; + + if (typeof callback === "function") { + callback(null, reference); + } + + return reference; + }); + }, callback); +}; + +/** + * Retrieve the tree represented by the oid. + * + * @async + * @param {String|Oid} String sha or Oid + * @return {Tree} + */ +Repository.prototype.getTree = function(oid, callback) { + var repository = this; + + return Tree.lookup(repository, oid).then(function(tree) { + tree.repo = repository; + + if (typeof callback === "function") { + callback(null, tree); + } + + return tree; + }, callback); +}; + +/** + * Returns true if the repository is in the APPLY_MAILBOX or + * APPLY_MAILBOX_OR_REBASE state. + * @return {Boolean} + */ +Repository.prototype.isApplyingMailbox = function() { + var state = this.state(); + return state === NodeGit.Repository.STATE.APPLY_MAILBOX || + state === NodeGit.Repository.STATE.APPLY_MAILBOX_OR_REBASE; +}; + +/** + * Returns true if the repository is in the BISECT state. + * @return {Boolean} + */ +Repository.prototype.isBisecting = function() { + return this.state() === NodeGit.Repository.STATE.BISECT; +}; + +/** + * Returns true if the repository is in the CHERRYPICK state. 
+ * @return {Boolean} + */ +Repository.prototype.isCherrypicking = function() { + return this.state() === NodeGit.Repository.STATE.CHERRYPICK; +}; + +/** + * Returns true if the repository is in the default NONE state. + * @return {Boolean} + */ +Repository.prototype.isDefaultState = function() { + return this.state() === NodeGit.Repository.STATE.NONE; +}; + +/** + * Returns true if the repository is in the MERGE state. + * @return {Boolean} + */ +Repository.prototype.isMerging = function() { + return this.state() === NodeGit.Repository.STATE.MERGE; +}; + +/** + * Returns true if the repository is in the REBASE, REBASE_INTERACTIVE, or + * REBASE_MERGE state. + * @return {Boolean} + */ +Repository.prototype.isRebasing = function() { + var state = this.state(); + return state === NodeGit.Repository.STATE.REBASE || + state === NodeGit.Repository.STATE.REBASE_INTERACTIVE || + state === NodeGit.Repository.STATE.REBASE_MERGE; +}; + +/** + * Returns true if the repository is in the REVERT state. + * @return {Boolean} + */ +Repository.prototype.isReverting = function() { + return this.state() === NodeGit.Repository.STATE.REVERT; +}; + +/** + * Rebases a branch onto another branch + * + * @async + * @param {String} branch + * @param {String} upstream + * @param {String} onto + * @param {Signature} signature Identity of the one performing the rebase + * @param {Function} beforeNextFn Callback to be called before each step + * of the rebase. If the callback returns a + * promise, the rebase will resume when the + * promise resolves. The rebase object is + * is passed to the callback. 
+ * @return {Oid|Index} A commit id for a succesful merge or an index for a + * rebase with conflicts + */ +Repository.prototype.rebaseBranches = function( + branch, + upstream, + onto, + signature, + beforeNextFn, + rebaseOptions +) +{ + var repo = this; + var branchCommit; + var upstreamCommit; + var ontoCommit; + var mergeOptions = (rebaseOptions || {}).mergeOptions; + signature = signature || repo.defaultSignature(); + + return Promise.all([ + repo.getReference(branch), + upstream ? repo.getReference(upstream) : null, + onto ? repo.getReference(onto) : null + ]) + .then(function(refs) { + return Promise.all([ + NodeGit.AnnotatedCommit.fromRef(repo, refs[0]), + upstream ? NodeGit.AnnotatedCommit.fromRef(repo, refs[1]) : null, + onto ? NodeGit.AnnotatedCommit.fromRef(repo, refs[2]) : null + ]); + }) + .then(function(annotatedCommits) { + branchCommit = annotatedCommits[0]; + upstreamCommit = annotatedCommits[1]; + ontoCommit = annotatedCommits[2]; + + return NodeGit.Merge.base(repo, branchCommit.id(), upstreamCommit.id()); + }) + .then(function(oid) { + if (oid.toString() === branchCommit.id().toString()) { + // we just need to fast-forward + return repo.mergeBranches(branch, upstream, null, null, mergeOptions) + .then(function() { + // checkout 'branch' to match the behavior of rebase + return repo.checkoutBranch(branch); + }); + } else if (oid.toString() === upstreamCommit.id().toString()) { + // 'branch' is already on top of 'upstream' + // checkout 'branch' to match the behavior of rebase + return repo.checkoutBranch(branch); + } + + return NodeGit.Rebase.init( + repo, + branchCommit, + upstreamCommit, + ontoCommit, + rebaseOptions + ) + .then(function(rebase) { + return performRebase(repo, rebase, signature, beforeNextFn); + }) + .then(function(error) { + if (error) { + throw error; + } + }); + }) + .then(function() { + return repo.getBranchCommit("HEAD"); + }); +}; + +/** + * Grabs a fresh copy of the index from the repository. 
Invalidates + * all previously grabbed indexes + * + * @async + * @return {Index} + */ +Repository.prototype.refreshIndex = function(callback) { + var repo = this; + + repo.setIndex(); // clear the index + + return repo.index() + .then(function(index) { + if (typeof callback === "function") { + callback(null, index); + } + + return index; + }, callback); +}; + +/** + * Merge a branch onto another branch + * + * @async + * @param {String|Reference} to + * @param {String|Reference} from + * @param {Signature} signature + * @param {Merge.PREFERENCE} mergePreference + * @param {MergeOptions} mergeOptions + * @return {Oid|Index} A commit id for a succesful merge or an index for a + * merge with conflicts + */ +Repository.prototype.mergeBranches = + function(to, from, signature, mergePreference, mergeOptions) { + var repo = this; + var fromBranch; + var toBranch; + + mergePreference = mergePreference || NodeGit.Merge.PREFERENCE.NONE; + mergeOptions = normalizeOptions(mergeOptions, NodeGit.MergeOptions); + + signature = signature || repo.defaultSignature(); + + return Promise.all([ + repo.getBranch(to), + repo.getBranch(from) + ]).then(function(objects) { + toBranch = objects[0]; + fromBranch = objects[1]; + + return Promise.all([ + repo.getBranchCommit(toBranch), + repo.getBranchCommit(fromBranch) + ]); + }) + .then(function(branchCommits) { + var toCommitOid = branchCommits[0].toString(); + var fromCommitOid = branchCommits[1].toString(); + + return NodeGit.Merge.base(repo, toCommitOid, fromCommitOid) + .then(function(baseCommit) { + if (baseCommit.toString() == fromCommitOid) { + // The commit we're merging to is already in our history. 
+ // nothing to do so just return the commit the branch is on + return toCommitOid; + } + else if (baseCommit.toString() == toCommitOid && + mergePreference !== NodeGit.Merge.PREFERENCE.NO_FASTFORWARD) { + // fast forward + var message = + "Fast forward branch " + + toBranch.shorthand() + + " to branch " + + fromBranch.shorthand(); + + return branchCommits[1].getTree() + .then(function(tree) { + if (toBranch.isHead()) { + // Checkout the tree if we're on the branch + var opts = { + checkoutStrategy: NodeGit.Checkout.STRATEGY.SAFE | + NodeGit.Checkout.STRATEGY.RECREATE_MISSING + }; + return NodeGit.Checkout.tree(repo, tree, opts); + } + }) + .then(function() { + return toBranch.setTarget( + fromCommitOid, + message) + .then(function() { + return fromCommitOid; + }); + }); + } + else if (mergePreference !== NodeGit.Merge.PREFERENCE.FASTFORWARD_ONLY) { + var updateHead; + // We have to merge. Lets do it! + return NodeGit.Reference.lookup(repo, "HEAD") + .then(function(headRef) { + return headRef.resolve(); + }) + .then(function(headRef) { + updateHead = !!headRef && (headRef.name() === toBranch.name()); + return NodeGit.Merge.commits( + repo, + toCommitOid, + fromCommitOid, + mergeOptions + ); + }) + .then(function(index) { + // if we have conflicts then throw the index + if (index.hasConflicts()) { + throw index; + } + + // No conflicts so just go ahead with the merge + return index.writeTreeTo(repo); + }) + .then(function(oid) { + var message = + "Merged " + + fromBranch.shorthand() + + " into " + + toBranch.shorthand(); + + return repo.createCommit( + toBranch.name(), + signature, + signature, + message, + oid, + [toCommitOid, fromCommitOid]); + }) + .then(function(commit) { + // we've updated the checked out branch, so make sure we update + // head so that our index isn't messed up + if (updateHead) { + return repo.getBranch(to) + .then(function(branch) { + return repo.getBranchCommit(branch); + }) + .then(function(branchCommit) { + return branchCommit.getTree(); 
+ }) + .then(function(tree) { + var opts = { + checkoutStrategy: NodeGit.Checkout.STRATEGY.SAFE | + NodeGit.Checkout.STRATEGY.RECREATE_MISSING + }; + return NodeGit.Checkout.tree(repo, tree, opts); + }) + .then(function() { + return commit; + }); + } + else { + return commit; + } + }); + } + else { + // A non fast-forwardable merge with ff-only + return toCommitOid; + } + }); + }); +}; + +/** + * @async + * @param {MergeheadForeachCb} callback The callback function to be called on + * each entry + */ +Repository.prototype.mergeheadForeach = function(callback) { + return _mergeheadForeach.call(this, callback, null); +}; + +/** + * Stages or unstages line selection of a specified file + * + * @async + * @param {String|Array} filePath The relative path of this file in the repo + * @param {Boolean} stageNew Set to stage new filemode. Unset to unstage. + * @return {Number} 0 or an error code + */ +Repository.prototype.stageFilemode = + function(filePath, stageNew, additionalDiffOptions) { + var repo = this; + var index; + var diffOptions = additionalDiffOptions ? { + flags: additionalDiffOptions + } : undefined; + var diffPromise = stageNew ? + NodeGit.Diff.indexToWorkdir(repo, index, { + flags: + NodeGit.Diff.OPTION.SHOW_UNTRACKED_CONTENT | + NodeGit.Diff.OPTION.RECURSE_UNTRACKED_DIRS | + (additionalDiffOptions || 0) + }) + : + repo.getHeadCommit() + .then(function getTreeFromCommit(commit) { + return commit.getTree(); + }) + .then(function getDiffFromTree(tree) { + return NodeGit.Diff.treeToIndex(repo, tree, index, diffOptions); + }); + var filePaths = (filePath instanceof Array) ? 
filePath : [filePath]; + + var indexLock = repo.path().replace(".git/", "") + ".git/index.lock"; + + return fse.remove(indexLock) + .then(function() { + return repo.refreshIndex(); + }) + .then(function(indexResult) { + index = indexResult; + }) + .then(function() { + return diffPromise; + }) + .then(function(diff) { + var origLength = filePaths.length; + filePaths = filePaths.filter(function(p) { + return ( + (NodeGit.Status.file(repo, p) & NodeGit.Status.STATUS.WT_MODIFIED) || + (NodeGit.Status.file(repo, p) & NodeGit.Status.STATUS.INDEX_MODIFIED) + ); + }); + if (filePaths.length === 0 && origLength > 0) { + return Promise.reject + ("Selected staging is only available on modified files."); + } + return diff.patches(); + }) + .then(function(patches) { + var pathPatches = patches.filter(function(patch) { + return ~filePaths.indexOf(patch.newFile().path()); + }); + if (pathPatches.length === 0) { + return Promise.reject("No differences found for this file."); + } + + return pathPatches + .reduce(function(lastIndexAddPromise, pathPatch) { + var entry = index.getByPath(pathPatch.newFile().path(), 0); + + entry.mode = stageNew ? 
+ pathPatch.newFile().mode() : pathPatch.oldFile().mode(); + + return lastIndexAddPromise + .then(function() { + return index.add(entry); + }); + }, Promise.resolve()); + }) + .then(function() { + return index.write(); + }); +}; + +/** + * Stages or unstages line selection of a specified file + * + * @async + * @param {String} filePath The relative path of this file in the repo + * @param {Array} selectedLines The array of DiffLine objects + * selected for staging or unstaging + * @param {Boolean} isStaged Are the selected lines currently staged + * @return {Number} 0 or an error code + */ +Repository.prototype.stageLines = + function(filePath, selectedLines, isSelectionStaged, additionalDiffOptions) { + + var repo = this; + var index; + var originalBlob; + + // The following chain checks if there is a patch with no hunks left for the + // file, and no filemode changes were done on the file. It is then safe to + // stage the entire file so the file doesn't show as having unstaged changes + // in `git status`. Also, check if there are no type changes. + var lastHunkStagedPromise = function lastHunkStagedPromise(result) { + return NodeGit.Diff.indexToWorkdir(repo, index, { + flags: + NodeGit.Diff.OPTION.SHOW_UNTRACKED_CONTENT | + NodeGit.Diff.OPTION.RECURSE_UNTRACKED_DIRS | + (additionalDiffOptions || 0) + }) + .then(function(diff) { + return diff.patches(); + }) + .then(function(patches) { + var pathPatch = patches.filter(function(patch) { + return patch.newFile().path() === filePath; + }); + var emptyPatch = false; + if (pathPatch.length > 0) { + // No hunks, unchanged file mode, and no type changes. 
+ emptyPatch = pathPatch[0].size() === 0 && + pathPatch[0].oldFile().mode() === pathPatch[0].newFile().mode() && + !pathPatch[0].isTypeChange(); + } + if (emptyPatch) { + return index.addByPath(filePath) + .then(function() { + return index.write(); + }); + } + + return result; + }); + }; + + return repo.refreshIndex() + .then(function(indexResult) { + index = indexResult; + var pathOid = index.getByPath(filePath).id; + + return repo.getBlob(pathOid); + }) + .then(function(blob) { + originalBlob = blob; + + return getPathHunks( + repo, + index, + filePath, + isSelectionStaged, + additionalDiffOptions + ); + }) + .then(function(hunks) { + return applySelectedLinesToTarget( + originalBlob, selectedLines, hunks, isSelectionStaged + ); + }) + .then(function(newContent) { + var newContentBuffer = new Buffer(newContent); + + var newOid = repo.createBlobFromBuffer(newContentBuffer); + return repo.getBlob(newOid); + }) + .then(function(newBlob) { + var entry = index.getByPath(filePath, 0); + entry.id = newBlob.id(); + entry.path = filePath; + entry.fileSize = newBlob.content().length; + + return index.add(entry); + }) + .then(function() { + return index.write(); + }) + .then(function(result) { + if (isSelectionStaged) { + return result; + } + + return lastHunkStagedPromise(result); + }); +}; + +/** + * Create a new tree builder. + * + * @param {Tree} tree + */ +Repository.prototype.treeBuilder = function() { + var builder = TreeBuilder.create(null); + + builder.root = builder; + builder.repo = this; + + return builder; +}; diff --git a/lib/reset.js b/lib/reset.js new file mode 100644 index 000000000..6c5933817 --- /dev/null +++ b/lib/reset.js @@ -0,0 +1,51 @@ +var NodeGit = require("../"); +var normalizeOptions = NodeGit.Utils.normalizeOptions; + +var Reset = NodeGit.Reset; +var _default = Reset.default; +var _reset = Reset.reset; + +/** + * Look up a refs's commit. + * + * @async + * @param {Repository} repo Repository where to perform the reset operation. 
+ * @param {Object} target The committish which content will be used to reset the + * content of the index. + * @param {Strarray} pathspecs List of pathspecs to operate on. + * + * @return {Number} 0 on success or an error code + */ +Reset.default = function(repo, target, pathspecs) { + return _default.call(this, repo, target, pathspecs); +}; + +/** + * Look up a refs's commit. + * + * @async + * @param {Repository} repo Repository where to perform the reset operation. + * + * @param {Object} target Committish to which the Head should be moved to. This + * object must belong to the given `repo` and can either + * be a git_commit or a git_tag. When a git_tag is being + * passed, it should be dereferencable to a git_commit + * which oid will be used as the target of the branch. + * @param {Number} resetType Kind of reset operation to perform. + * + * @param {CheckoutOptions} opts Checkout options to be used for a HARD reset. + * The checkout_strategy field will be overridden + * (based on reset_type). This parameter can be + * used to propagate notify and progress + * callbacks. + * + * @param {String|Ref} name Ref name, e.g. "master", "refs/heads/master" + * or Branch Ref + * + * @return {Number} 0 on success or an error code + */ +Reset.reset = function(repo, target, resetType, opts) { + opts = normalizeOptions(opts, NodeGit.CheckoutOptions); + + return _reset.call(this, repo, target, resetType, opts); +}; diff --git a/lib/revert.js b/lib/revert.js new file mode 100644 index 000000000..aee3f0baf --- /dev/null +++ b/lib/revert.js @@ -0,0 +1,46 @@ +var NodeGit = require("../"); +var normalizeOptions = NodeGit.Utils.normalizeOptions; + +var Revert = NodeGit.Revert; +var _commit = Revert.commit; + +/** + * Reverts the given commit against the given "our" commit, producing an index + * that reflects the result of the revert. + * + * @async + * @param {Repository} repo the repository that contains the given commits. 
+ * @param {Commit} revert_commit the commit to revert + * @param {Commit} our_commit the commit to revert against (e.g. HEAD) + * @param {Number} mainline the parent of the revert commit, if it is a merge + * @param {MergeOptions} merge_options the merge options (or null for defaults) + * + * @return {Index} the index result + */ +Revert.commit = function( + repo, + revert_commit, + our_commit, + mainline, + merge_options, + callback +) +{ + merge_options = normalizeOptions(merge_options, NodeGit.MergeOptions); + + return _commit.call( + this, + repo, + revert_commit, + our_commit, + mainline, + merge_options + ) + .then(function(result) { + if (typeof callback === "function") { + callback(null, result); + } + + return result; + }, callback); +}; diff --git a/lib/revwalk.js b/lib/revwalk.js index bd48b7481..80205fb22 100644 --- a/lib/revwalk.js +++ b/lib/revwalk.js @@ -1,29 +1,108 @@ -var git = require('../'), - RevWalk = git.RevWalk; +var NodeGit = require("../"); +var Revwalk = NodeGit.Revwalk; -var oldSorting = RevWalk.prototype.sorting; +Object.defineProperty(Revwalk.prototype, "repo", { + get: function () { return this.repository(); } +}); +var _sorting = Revwalk.prototype.sorting; /** - * Refer to vendor/libgit2/include/git2/revwalk.h for sort definitions. + * @typedef historyEntry + * @type {Object} + * @property {Commit} commit the commit for this entry + * @property {Number} status the status of the file in the commit + * @property {String} newName the new name that is provided when status is + * renamed + * @property {String} oldName the old name that is provided when status is + * renamed */ -RevWalk.Sort = { - None: 0, - Topological: 1, - Time: 2, - Reverse: 4 +var fileHistoryWalk = Revwalk.prototype.fileHistoryWalk; +/** + * @param {String} filePath + * @param {Number} max_count + * @async + * @return {Array} + */ +Revwalk.prototype.fileHistoryWalk = fileHistoryWalk; + +/** + * Get a number of commits. 
+ * + * @async + * @param {Number} count (default: 10) + * @return {Array} + */ +Revwalk.prototype.getCommits = function(count) { + count = count || 10; + var promises = []; + var walker = this; + + function walkCommitsCount(count) { + if (count === 0) { return; } + + return walker.next().then(function(oid) { + promises.push(walker.repo.getCommit(oid)); + return walkCommitsCount(count - 1); + }) + .catch(function(error) { + if (error.errno !== NodeGit.Error.CODE.ITEROVER) { + throw error; + } + }); + } + + return walkCommitsCount(count).then(function() { + return Promise.all(promises); + }); +}; + +/** + * Walk the history grabbing commits until the checkFn called with the + * current commit returns false. + * + * @async + * @param {Function} checkFn function returns false to stop walking + * @return {Array} + */ +Revwalk.prototype.getCommitsUntil = function(checkFn) { + var commits = []; + var walker = this; + + function walkCommitsCb() { + return walker.next().then(function(oid) { + return walker.repo.getCommit(oid).then(function(commit) { + commits.push(commit); + if (checkFn(commit)) { + return walkCommitsCb(); + } + }); + }) + .catch(function(error) { + if (error.errno !== NodeGit.Error.CODE.ITEROVER) { + throw error; + } + }); + } + + return walkCommitsCb().then(function() { + return commits; + }); }; /** * Set the sort order for the revwalk. 
This function takes variable arguments - * like `revwalk.sorting(git.RevWalk.Topological, git.RevWalk.Reverse).` + * like `revwalk.sorting(NodeGit.RevWalk.Topological, NodeGit.RevWalk.Reverse).` * * @param {Number} sort */ -RevWalk.prototype.sorting = function() { +Revwalk.prototype.sorting = function() { var sort = 0; - for (var i = 0; i < arguments.length; i++) + + for (var i = 0; i < arguments.length; i++) { sort |= arguments[i]; - oldSorting.call(this, sort); + } + + _sorting.call(this, sort); }; /** @@ -34,24 +113,30 @@ RevWalk.prototype.sorting = function() { * @param {Function} callback * @return {Commit} */ -RevWalk.prototype.walk = function(oid, callback) { - var self = this; - this.push(oid, function revWalkPush(error) { - if (error) return callback(error); +Revwalk.prototype.walk = function(oid, callback) { + var revwalk = this; + + this.push(oid); - function walk() { - self.next(function revWalkNext(error, oid) { - if (error) return callback(error); - if (!oid) return callback(); + function walk() { + revwalk.next().done(function(oid) { + if (!oid) { + if (typeof callback === "function") { + return callback(); + } - self.repo.getCommit(oid, function revWalkCommitLookup(error, commit) { - if (error) return callback(error); + return; + } + revwalk.repo.getCommit(oid).then(function(commit) { + if (typeof callback === "function") { callback(null, commit); - walk(); - }); + } + + walk(); }); - } - walk(); - }); + }, callback); + } + + walk(); }; diff --git a/lib/signature.js b/lib/signature.js index 652793a07..7fc8e274d 100644 --- a/lib/signature.js +++ b/lib/signature.js @@ -1,9 +1,10 @@ -var git = require('../'), - Signature = git.Signature; +var NodeGit = require("../"); +var Signature = NodeGit.Signature; /** * Standard string representation of an author. - * @return {String} + * + * @return {string} Representation of the author. 
*/ Signature.prototype.toString = function() { return this.name().toString() + " <" + this.email().toString() + ">"; diff --git a/lib/stash.js b/lib/stash.js new file mode 100644 index 000000000..7067b24ce --- /dev/null +++ b/lib/stash.js @@ -0,0 +1,62 @@ +var NodeGit = require("../"); +var normalizeOptions = NodeGit.Utils.normalizeOptions; +var shallowClone = NodeGit.Utils.shallowClone; +var Stash = NodeGit.Stash; + +var _apply = Stash.apply; +var _foreach = Stash.foreach; +var _pop = Stash.pop; + +Stash.apply = function(repo, index, options) { + var checkoutOptions; + + if (options) { + options = shallowClone(options); + checkoutOptions = options.checkoutOptions; + delete options.checkoutOptions; + } else { + options = {}; + } + + options = normalizeOptions(options, NodeGit.StashApplyOptions); + + if (checkoutOptions) { + options.checkoutOptions = + normalizeOptions(checkoutOptions, NodeGit.CheckoutOptions); + } + + return _apply(repo, index, options); +}; + +// Override Stash.foreach to eliminate the need to pass null payload +Stash.foreach = function(repo, callback) { + function wrappedCallback(index, message, oid) { + // We need to copy the OID since libgit2 types are getting cleaned up + // incorrectly right now in callbacks + + return callback(index, message, oid.copy()); + } + + return _foreach(repo, wrappedCallback, null); +}; + +Stash.pop = function(repo, index, options) { + var checkoutOptions; + + if (options) { + options = shallowClone(options); + checkoutOptions = options.checkoutOptions; + delete options.checkoutOptions; + } else { + options = {}; + } + + options = normalizeOptions(options, NodeGit.StashApplyOptions); + + if (checkoutOptions) { + options.checkoutOptions = + normalizeOptions(checkoutOptions, NodeGit.CheckoutOptions); + } + + return _pop(repo, index, options); +}; diff --git a/lib/status.js b/lib/status.js new file mode 100644 index 000000000..c5c762baa --- /dev/null +++ b/lib/status.js @@ -0,0 +1,18 @@ +var NodeGit = require("../"); 
+var normalizeOptions = NodeGit.Utils.normalizeOptions; + +var Status = NodeGit.Status; + +var _foreach = Status.foreach; +var _foreachExt = Status.foreachExt; + +// Override Status.foreach to eliminate the need to pass null payload +Status.foreach = function(repo, callback) { + return _foreach(repo, callback, null); +}; + +// Override Status.foreachExt to normalize opts +Status.foreachExt = function(repo, opts, callback) { + opts = normalizeOptions(opts, NodeGit.StatusOptions); + return _foreachExt(repo, opts, callback, null); +}; diff --git a/lib/status_file.js b/lib/status_file.js new file mode 100644 index 000000000..ac6b124e3 --- /dev/null +++ b/lib/status_file.js @@ -0,0 +1,106 @@ +var NodeGit = require("../"); +var Status = NodeGit.Status; + +var StatusFile = function(args) { + var path = args.path; + var status = args.status; + var entry = args.entry; + + if (entry) { + status = entry.status(); + if (entry.indexToWorkdir()) { + path = entry.indexToWorkdir().newFile().path(); + } else { + path = entry.headToIndex().newFile().path(); + } + } + + var codes = Status.STATUS; + + var getStatus = function() { + var fileStatuses = []; + + for(var key in Status.STATUS) { + if (status & Status.STATUS[key]) { + fileStatuses.push(key); + } + } + + return fileStatuses; + }; + + var data = { + path: path, + entry: entry, + statusBit: status, + statuses: getStatus() + }; + + return { + headToIndex: function() { + if (data.entry) { + return entry.headToIndex(); + } else { + return undefined; + } + }, + indexToWorkdir: function() { + if (data.entry) { + return entry.indexToWorkdir(); + } else { + return undefined; + } + }, + inIndex: function() { + return status & codes.INDEX_NEW || + status & codes.INDEX_MODIFIED || + status & codes.INDEX_DELETED || + status & codes.INDEX_TYPECHANGE || + status & codes.INDEX_RENAMED; + }, + inWorkingTree: function() { + return status & codes.WT_NEW || + status & codes.WT_MODIFIED || + status & codes.WT_DELETED || + status & 
codes.WT_TYPECHANGE || + status & codes.WT_RENAMED; + }, + isConflicted: function() { + return status & codes.CONFLICTED; + }, + isDeleted: function() { + return status & codes.WT_DELETED || + status & codes.INDEX_DELETED; + }, + isIgnored: function() { + return status & codes.IGNORED; + }, + isModified: function() { + return status & codes.WT_MODIFIED || + status & codes.INDEX_MODIFIED; + }, + isNew: function() { + return status & codes.WT_NEW || + status & codes.INDEX_NEW; + }, + isRenamed: function() { + return status & codes.WT_RENAMED || + status & codes.INDEX_RENAMED; + }, + isTypechange: function() { + return status & codes.WT_TYPECHANGE || + status & codes.INDEX_TYPECHANGE; + }, + path: function() { + return data.path; + }, + status: function() { + return data.statuses; + }, + statusBit: function() { + return data.statusBit; + } + }; +}; + +NodeGit.StatusFile = StatusFile; diff --git a/lib/status_list.js b/lib/status_list.js new file mode 100644 index 000000000..efccbad2e --- /dev/null +++ b/lib/status_list.js @@ -0,0 +1,12 @@ +var NodeGit = require("../"); +var normalizeOptions = NodeGit.Utils.normalizeOptions; + +var StatusList = NodeGit.StatusList; + +var _create = StatusList.create; + +// Override StatusList.create to normalize opts +StatusList.create = function(repo, opts) { + opts = normalizeOptions(opts, NodeGit.StatusOptions); + return _create(repo, opts); +}; diff --git a/lib/submodule.js b/lib/submodule.js new file mode 100644 index 000000000..2b0cb530d --- /dev/null +++ b/lib/submodule.js @@ -0,0 +1,10 @@ +var NodeGit = require("../"); + +var Submodule = NodeGit.Submodule; + +var _foreach = Submodule.foreach; + +// Override Submodule.foreach to eliminate the need to pass null payload +Submodule.foreach = function(repo, callback) { + return _foreach(repo, callback, null); +}; diff --git a/lib/tag.js b/lib/tag.js new file mode 100644 index 000000000..bf8ddff49 --- /dev/null +++ b/lib/tag.js @@ -0,0 +1,12 @@ +var NodeGit = require("../"); +var 
LookupWrapper = NodeGit.Utils.lookupWrapper; +var Tag = NodeGit.Tag; + +/** +* Retrieves the tag pointed to by the oid +* @async +* @param {Repository} repo The repo that the tag lives in +* @param {String|Oid|Tag} id The tag to lookup +* @return {Tag} +*/ +Tag.lookup = LookupWrapper(Tag); diff --git a/lib/tree.js b/lib/tree.js index eada1462f..0ece24abc 100644 --- a/lib/tree.js +++ b/lib/tree.js @@ -1,20 +1,76 @@ -var git = require('../'), - Tree = git.Tree, - events = require('events'), - path = require('path'); +var path = require("path"); +var events = require("events"); +var NodeGit = require("../"); +var Diff = NodeGit.Diff; +var LookupWrapper = NodeGit.Utils.lookupWrapper; +var Tree = NodeGit.Tree; +var Treebuilder = NodeGit.Treebuilder; -var oldEntryByIndex = Tree.prototype.entryByIndex, - oldEntryByName = Tree.prototype.entryByName, - oldGetEntry = Tree.prototype.getEntry; +/** +* Retrieves the tree pointed to by the oid +* @async +* @param {Repository} repo The repo that the tree lives in +* @param {String|Oid|Tree} id The tree to lookup +* @param {Function} callback +* @return {Tree} +*/ +Tree.lookup = LookupWrapper(Tree); + +/** + * Make builder. This is helpful for modifying trees. 
+ * @return {Treebuilder} + */ +Tree.prototype.builder = function() { + var builder = Treebuilder.create(this); + + builder.root = builder; + builder.repo = this.repo; + + return builder; +}; /** * Diff two trees + * @async + * @param {Tree} tree to diff against + * @param {Function} callback + * @return {DiffList} + */ +Tree.prototype.diff = function(tree, callback) { + return this.diffWithOptions(tree, null, callback); +}; + +/** + * Diff two trees with options + * @async * @param {Tree} tree to diff against + * @param {Object} options * @param {Function} callback * @return {DiffList} */ -Tree.prototype.diff = function(that, callback) { - this.diffTree(this.repo, that, null, callback); +Tree.prototype.diffWithOptions = function(tree, options, callback) { + return Diff.treeToTree(this.repo, tree, this, options).then(function(diff) { + if (typeof callback === "function") { + callback(null, diff); + } + + return diff; + }, callback); +}; + +/** + * Return an array of the entries in this tree (excluding its children). + * @return {Array} an array of TreeEntrys + */ +Tree.prototype.entries = function() { + var size = this.entryCount(); + var result = []; + + for (var i = 0; i < size; i++) { + result.push(this.entryByIndex(i)); + } + + return result; }; /** @@ -24,7 +80,7 @@ Tree.prototype.diff = function(that, callback) { * @return {TreeEntry} */ Tree.prototype.entryByIndex = function(i) { - var entry = oldEntryByIndex.call(this, i); + var entry = this._entryByIndex(i); entry.parent = this; return entry; }; @@ -36,7 +92,7 @@ Tree.prototype.entryByIndex = function(i) { * @return {TreeEntry} */ Tree.prototype.entryByName = function(name) { - var entry = oldEntryByName.call(this, name); + var entry = this._entryByName(name); entry.parent = this; return entry; }; @@ -45,65 +101,72 @@ Tree.prototype.entryByName = function(name) { * Get an entry at a path. 
Unlike by name, this takes a fully * qualified path, like `/foo/bar/baz.javascript` * - * @param {String} path + * @param {String} filePath * @return {TreeEntry} */ -Tree.prototype.getEntry = function(path, callback) { - // FIXME: this method ought to implement the recursion directly, rather than - // rely on oldGetEntry, in order to ensure that `parent` pointers are direct. - var self = this; - oldGetEntry.call(this, path, function(error, entry) { - if (error) return callback(error); +Tree.prototype.getEntry = function(filePath, callback) { + var tree = this; - entry.parent = self; - entry.path = function() { return path }; - callback(null, entry); + return this.entryByPath(filePath).then(function(entry) { + entry.parent = tree; + entry.dirtoparent = path.dirname(filePath); + + if (typeof callback === "function") { + callback(null, entry); + } + + return entry; }); }; /** - * Return an array of the entries in this tree (excluding its children). - * @return {[TreeEntry]} an array of TreeEntrys + * Return the path of this tree, like `/lib/foo/bar` + * @return {String} */ -Tree.prototype.entries = function() { - var size = this.size(), - result = []; - for (var i = 0; i < size; i++) { - result.push(this.entryByIndex(i)); - } - return result; +Tree.prototype.path = function(blobsOnly) { + return this.entry ? this.entry.path() : ""; }; /** - * Recursively walk the tree in breadth-first order. Fires an event for each entry. + * Recursively walk the tree in breadth-first order. Fires an event for each + * entry. * - * @fires Tree#entry - * @fires Tree#end + * @fires EventEmitter#entry Tree + * @fires EventEmitter#end Array + * @fires EventEmitter#error Error * - * @param {Boolean} [blobsOnly = true] True to emit only blob & blob executable entries. + * @param {Boolean} [blobsOnly = true] True to emit only blob & blob executable + * entries. 
* * @return {EventEmitter} */ Tree.prototype.walk = function(blobsOnly) { - if (typeof blobsOnly == 'undefined') blobsOnly = true; + blobsOnly = typeof blobsOnly === "boolean" ? blobsOnly : true; - var self = this, - event = new events.EventEmitter(), - entries = [], - errors = []; + var self = this; + var event = new events.EventEmitter(); var total = 1; + var entries = new Set(); + var finalEntires = []; // This looks like a DFS, but it is a BFS because of implicit queueing in // the recursive call to `entry.getTree(bfs)` function bfs(error, tree) { total--; - if (error) return errors.push(error); - tree.entries().forEach(function (entry) { - if (!blobsOnly || entry.isFile()) { - event.emit('entry', entry); - entries.push(entry); + if (error) { + return event.emit("error", error); + } + + tree.entries().forEach(function (entry, entryIndex) { + if (!blobsOnly || entry.isFile() && !entries.has(entry)) { + event.emit("entry", entry); + entries.add(entry); + + // Node 0.12 doesn't support either [v for (v of entries)] nor + // Array.from so we'll just maintain our own list. + finalEntires.push(entry); } if (entry.isTree()) { @@ -111,8 +174,10 @@ Tree.prototype.walk = function(blobsOnly) { entry.getTree(bfs); } }); - if (total === 0) - event.emit('end', errors.length ? errors : null, entries); + + if (total === 0) { + event.emit("end", finalEntires); + } } event.start = function() { @@ -121,23 +186,3 @@ Tree.prototype.walk = function(blobsOnly) { return event; }; - -/** - * Return the path of this tree, like `/lib/foo/bar` - * @return {String} - */ -Tree.prototype.path = function(blobsOnly) { - return this.entry ? this.entry.path() : ''; -}; - -/** - * Make builder. This is helpful for modifying trees. 
- * @return {TreeBuilder} - */ -var oldBuilder = Tree.prototype.builder; -Tree.prototype.builder = function() { - var builder = oldBuilder.call(this); - builder.root = builder; - builder.repo = this.repo; - return builder; -}; diff --git a/lib/tree_builder.js b/lib/tree_builder.js deleted file mode 100644 index c3570c14c..000000000 --- a/lib/tree_builder.js +++ /dev/null @@ -1,144 +0,0 @@ -var git = require('../'), - TreeBuilder = git.TreeBuilder, - TreeEntry = git.TreeEntry, - path = require('path'); - -var oldInsert = TreeBuilder.prototype.insert; - -/** - * Insert an object into this tree by oid - * - * @param {String} filename - * @param {Oid} oid - * @param {Number} filemode - */ -TreeBuilder.prototype.insert = function(filename, oid, filemode) { - if (!this.insertions) this.insertions = []; - - this.insertions.push([filename, oid, filemode]); -}; - -/** - * Insert a blob into this tree - * - * @param {String} filename - * @param {Blob} blob - * @param {Boolean} isExecutable - */ -TreeBuilder.prototype.insertBlob = function(filename, blob, isExecutable) { - if (!this.blobs) this.blobs = []; - - this.blobs.push([filename, blob, isExecutable ? TreeEntry.FileMode.Executable : TreeEntry.FileMode.Blob]); -}; - -var oldWrite = TreeBuilder.prototype.write; - -/** - * Write this tree to the repo. 
- * - * @param {Function} callback - */ -TreeBuilder.prototype.write = function(callback) { - var self = this; - this.doInsertions(function(error) { - if (error) return callback(error); - - if (self.builders && self.builders.length) { - writeNextLevel(self.repo, self.builders, function(error, previousName, previousTreeId) { - if (previousName && previousTreeId) { - oldInsert.call(self, previousName, previousTreeId, TreeEntry.FileMode.Tree); - } - oldWrite.call(self, self.repo, callback); - }); - } else { - oldWrite.call(self, self.repo, callback); - } - }) -}; - -TreeBuilder.prototype.doInsertions = function(callback) { - var self = this; - - this.createBlobs(function(error) { - if (error) return callback(error); - - self.doOidInsertions(callback); - }) -}; - -TreeBuilder.prototype.createBlobs = function(callback) { - if (!this.blobs || !this.blobs.length) return callback(); - - var self = this, - data = this.blobs.pop(), - path = data[0], buffer = data[1], filemode = data[2]; - - this.repo.createBlobFromBuffer(buffer, function(error, blobId) { - if (error) return callback(error); - - self.insert(path, blobId, filemode); - self.createBlobs(callback); - }); -} - -TreeBuilder.prototype.doOidInsertions = function(callback) { - if (!this.insertions || !this.insertions.length) return callback(); - - var self = this, - data = this.insertions.pop(), - filename = data[0], oid = data[1], filemode = data[2], - parts = filename.split(path.sep), - pathParts = parts.slice(0, parts.length - 1), - filename = parts[parts.length - 1]; - - insertOneLevel(this, pathParts, function(error, builder) { - if (error) return callback(error); - oldInsert.call(builder, filename, oid, filemode); - self.doOidInsertions(callback); - }); -}; - -function writeNextLevel(repo, builders, callback, previousName, previousTreeId) { - var builder = builders.pop(); - if (!builder) return callback(null, previousName, previousTreeId); - - if (previousName && previousTreeId) { - oldInsert.call(builder, 
previousName, previousTreeId, TreeEntry.FileMode.Tree); - } - oldWrite.call(builder, repo, function(error, previousTreeId) { - if (error) return callback(error); - - previousName = builder.name; - writeNextLevel(repo, builders, callback, previousName, previousTreeId); - }); -} - -function insertOneLevel(builder, parts, callback) { - if (!parts.length) return callback(null, builder); - - var part = parts[0], rest = parts.slice(1, parts.length); - if (!part) return insertOneLevel(builder, rest, callback); - - if (!builder.root.builders) builder.root.builders = []; - var entry = builder.get(part); - if (entry) { - if (!entry.isTree()) return callback("Invalid path part " + part); - entry.parent = builder; - - entry.getTree(function(error, tree) { - if (error) return callback(error); - - var next = tree.builder(); - next.name = part; - next.root = builder.root; - builder.root.builders.push(next); - insertOneLevel(next, rest, callback); - }); - } else { - var next = TreeBuilder.create(); - next.name = part; - next.root = builder.root; - builder.root.builders.push(next); - insertOneLevel(next, rest, callback); - } -} \ No newline at end of file diff --git a/lib/tree_entry.js b/lib/tree_entry.js index 02f29b315..a65d85172 100644 --- a/lib/tree_entry.js +++ b/lib/tree_entry.js @@ -1,79 +1,88 @@ -var git = require('../'), - TreeEntry = git.TreeEntry, - path = require('path'); +var path = require("path"); +var NodeGit = require("../"); +var TreeEntry = NodeGit.TreeEntry; /** - * Refer to vendor/libgit2/include/git2/types.h for filemode definitions. - * - * @readonly - * @enum {Integer} + * Retrieve the blob for this entry. Make sure to call `isBlob` first! 
+ * @async + * @return {Blob} */ -TreeEntry.FileMode = { - /** 0000000 */ New: 0, - /** 0040000 */ Tree: 16384, - /** 0100644 */ Blob: 33188, - /** 0100755 */ Executable: 33261, - /** 0120000 */ Link: 40960, - /** 0160000 */ Commit: 57344 +TreeEntry.prototype.getBlob = function(callback) { + return this.parent.repo.getBlob(this.id()).then(function(blob) { + if (typeof callback === "function") { + callback(null, blob); + } + + return blob; + }, callback); }; /** - * Is this TreeEntry a blob? (i.e., a file) - * @return {Boolean} + * Retrieve the tree for this entry. Make sure to call `isTree` first! + * @async + * @return {Tree} */ -TreeEntry.prototype.isFile = function() { - return this.filemode() === TreeEntry.FileMode.Blob || - this.filemode() === TreeEntry.FileMode.Executable; +TreeEntry.prototype.getTree = function(callback) { + var entry = this; + + return this.parent.repo.getTree(this.id()).then(function(tree) { + tree.entry = entry; + + if (typeof callback === "function") { + callback(null, tree); + } + + return tree; + }, callback); }; /** - * Is this TreeEntry a tree? (i.e., a directory) + * Is this TreeEntry a blob? Alias for `isFile` * @return {Boolean} */ -TreeEntry.prototype.isTree = function() { - return this.filemode() === TreeEntry.FileMode.Tree; +TreeEntry.prototype.isBlob = function() { + return this.isFile(); }; /** * Is this TreeEntry a directory? Alias for `isTree` * @return {Boolean} */ -TreeEntry.prototype.isDirectory = TreeEntry.prototype.isTree; +TreeEntry.prototype.isDirectory = function() { + return this.isTree(); +}; /** - * Is this TreeEntry a blob? Alias for `isFile` + * Is this TreeEntry a blob? (i.e., a file) * @return {Boolean} */ -TreeEntry.prototype.isBlob = TreeEntry.prototype.isFile; +TreeEntry.prototype.isFile = function() { + return this.filemode() === TreeEntry.FILEMODE.BLOB || + this.filemode() === TreeEntry.FILEMODE.EXECUTABLE; +}; /** - * Retrieve the SHA for this TreeEntry. 
- * @return {String} + * Is this TreeEntry a submodule? + * @return {Boolean} */ -TreeEntry.prototype.sha = function() { - return this.oid().sha(); +TreeEntry.prototype.isSubmodule = function() { + return this.filemode() === TreeEntry.FILEMODE.COMMIT; }; /** - * Retrieve the tree for this entry. Make sure to call `isTree` first! - * @return {Tree} + * Is this TreeEntry a tree? (i.e., a directory) + * @return {Boolean} */ -TreeEntry.prototype.getTree = function(callback) { - var self = this; - this.parent.repo.getTree(this.oid(), function(error, tree) { - if (error) return callback(error); - - tree.entry = self; - callback(null, tree); - }); +TreeEntry.prototype.isTree = function() { + return this.filemode() === TreeEntry.FILEMODE.TREE; }; /** - * Retrieve the tree for this entry. Make sure to call `isTree` first! - * @return {Blob} + * Retrieve the SHA for this TreeEntry. Alias for `sha` + * @return {String} */ -TreeEntry.prototype.getBlob = function(callback) { - this.parent.repo.getBlob(this.oid(), callback); +TreeEntry.prototype.oid = function() { + return this.sha(); }; /** @@ -81,7 +90,16 @@ TreeEntry.prototype.getBlob = function(callback) { * @return {String} */ TreeEntry.prototype.path = function(callback) { - return path.join(this.parent.path(), this.name()); + var dirtoparent = this.dirtoparent || ""; + return path.join(this.parent.path(), dirtoparent, this.name()); +}; + +/** + * Retrieve the SHA for this TreeEntry. 
+ * @return {String} + */ +TreeEntry.prototype.sha = function() { + return this.id().toString(); }; /** diff --git a/lib/util.js b/lib/util.js deleted file mode 100644 index 096a34291..000000000 --- a/lib/util.js +++ /dev/null @@ -1,25 +0,0 @@ -var git = require('../'); - -exports.makeSafe = function(object, key) { - var oldFn = object[key]; - object[key] = function() { - try { - oldFn.apply(this, arguments); - } catch (e) { - var callback = arguments[arguments.length - 1]; - callback(e); - } - }; -}; - -exports.normalizeOid = function(object, key) { - var oldFn = object[key]; - object[key] = function() { - var oid = arguments[0]; - if (typeof oid === 'string') oid = git.Oid.fromString(oid); - var newArguments = [oid]; - for (var i = 1; i < arguments.length; i++) - newArguments[i] = arguments[i]; - oldFn.apply(this, newArguments); - }; -}; diff --git a/lib/utils/lookup_wrapper.js b/lib/utils/lookup_wrapper.js new file mode 100644 index 000000000..eee4dd768 --- /dev/null +++ b/lib/utils/lookup_wrapper.js @@ -0,0 +1,39 @@ +var NodeGit = require("../../"); + +/** +* Wraps a method so that you can pass in either a string, OID or the object +* itself and you will always get back a promise that resolves to the object. +* @param {Object} objectType The object type that you're expecting to receive. +* @param {Function} lookupFunction The function to do the lookup for the +* object. Defaults to `objectType.lookup`. 
+* @return {Function} +*/ +function lookupWrapper(objectType, lookupFunction) { + lookupFunction = lookupFunction || objectType.lookup; + + return function(repo, id, callback) { + if (id instanceof objectType) { + return Promise.resolve(id).then(function(obj) { + obj.repo = repo; + + if (typeof callback === "function") { + callback(null, obj); + } + + return obj; + }, callback); + } + + return lookupFunction(repo, id).then(function(obj) { + obj.repo = repo; + + if (typeof callback === "function") { + callback(null, obj); + } + + return obj; + }, callback); + }; +} + +NodeGit.Utils.lookupWrapper = lookupWrapper; diff --git a/lib/utils/normalize_options.js b/lib/utils/normalize_options.js new file mode 100644 index 000000000..a1a46255c --- /dev/null +++ b/lib/utils/normalize_options.js @@ -0,0 +1,29 @@ +var NodeGit = require("../../"); + +/** + * Normalize an object to match a struct. + * + * @param {String, Object} oid - The oid string or instance. + * @return {Object} An Oid instance. + */ +function normalizeOptions(options, Ctor) { + if (!options) { + return null; + } + + if (options instanceof Ctor) { + return options; + } + + var instance = new Ctor(); + + Object.keys(options).forEach(function(key) { + if (typeof options[key] !== "undefined") { + instance[key] = options[key]; + } + }); + + return instance; +} + +NodeGit.Utils.normalizeOptions = normalizeOptions; diff --git a/lib/utils/shallow_clone.js b/lib/utils/shallow_clone.js new file mode 100644 index 000000000..0dbe4113e --- /dev/null +++ b/lib/utils/shallow_clone.js @@ -0,0 +1,14 @@ +var NodeGit = require("../../"); + +function shallowClone() { + var merges = Array.prototype.slice.call(arguments); + + return merges.reduce(function(obj, merge) { + return Object.keys(merge).reduce(function(obj, key) { + obj[key] = merge[key]; + return obj; + }, obj); + }, {}); +} + +NodeGit.Utils.shallowClone = shallowClone; diff --git a/lifecycleScripts/clean.js b/lifecycleScripts/clean.js new file mode 100644 index 
000000000..a0f7076b3 --- /dev/null +++ b/lifecycleScripts/clean.js @@ -0,0 +1,5 @@ +var cleanForPublish = require("clean-for-publish"); +var path = require("path"); + +var location = path.join(__dirname, ".."); +cleanForPublish(location); diff --git a/lifecycleScripts/configureLibssh2.js b/lifecycleScripts/configureLibssh2.js new file mode 100644 index 000000000..23acd1ff0 --- /dev/null +++ b/lifecycleScripts/configureLibssh2.js @@ -0,0 +1,47 @@ +var cp = require("child_process"); +var path = require("path"); +var rooted = path.join.bind(path, __dirname, ".."); + +module.exports = function retrieveExternalDependencies() { + if (process.platform === "win32") { + return Promise.resolve(""); + } + + return new Promise(function(resolve, reject) { + console.info("[nodegit] Configuring libssh2."); + var opensslDir = rooted("vendor/openssl/openssl"); + var newEnv = {}; + Object.keys(process.env).forEach(function(key) { + newEnv[key] = process.env[key]; + }); + newEnv.CPPFLAGS = newEnv.CPPFLAGS || ""; + newEnv.CPPFLAGS += " -I" + path.join(opensslDir, "include"); + newEnv.CPPFLAGS = newEnv.CPPFLAGS.trim(); + + cp.exec( + rooted("vendor/libssh2/configure") + + " --with-libssl-prefix=" + opensslDir, + {cwd: rooted("vendor/libssh2/"), env: newEnv}, + function(err, stdout, stderr) { + if (err) { + console.error(err); + console.error(stderr); + reject(err, stderr); + } + else { + resolve(stdout); + } + } + ); + }); +}; + +// Called on the command line +if (require.main === module) { + if (process.platform === "win32") { + console.log("nothing to do"); + } + else { + module.exports().done(); + } +} diff --git a/lifecycleScripts/install.js b/lifecycleScripts/install.js new file mode 100644 index 000000000..16b047120 --- /dev/null +++ b/lifecycleScripts/install.js @@ -0,0 +1,62 @@ +var buildFlags = require("../utils/buildFlags"); +var spawn = require("child_process").spawn; + +module.exports = function install() { + console.log("[nodegit] Running install script"); + + var 
nodePreGyp = "node-pre-gyp"; + + if (process.platform === "win32") { + nodePreGyp += ".cmd"; + } + + var args = ["install"]; + + if (buildFlags.mustBuild) { + console.info( + "[nodegit] Pre-built download disabled, building from source." + ); + args.push("--build-from-source"); + + if (buildFlags.debugBuild) { + console.info("[nodegit] Building debug version."); + args.push("--debug"); + } + } + else { + args.push("--fallback-to-build"); + } + + return new Promise(function(resolve, reject) { + var spawnedNodePreGyp = spawn(nodePreGyp, args); + + spawnedNodePreGyp.stdout.on("data", function(data) { + console.info(data.toString().trim()); + }); + + spawnedNodePreGyp.stderr.on("data", function(data) { + console.error(data.toString().trim()); + }); + + spawnedNodePreGyp.on("close", function(code) { + if (!code) { + resolve(); + } else { + reject(code); + } + }); + }) + .then(function() { + console.info("[nodegit] Completed installation successfully."); + }); +}; + +// Called on the command line +if (require.main === module) { + module.exports() + .catch(function(e) { + console.error("[nodegit] ERROR - Could not finish install"); + console.error("[nodegit] ERROR - finished with error code: " + e); + process.exit(e); + }); +} diff --git a/lifecycleScripts/postinstall.js b/lifecycleScripts/postinstall.js new file mode 100755 index 000000000..91e7c2a3c --- /dev/null +++ b/lifecycleScripts/postinstall.js @@ -0,0 +1,83 @@ +var fse = require("fs-extra"); +var path = require("path"); + +var exec = require("../utils/execPromise"); +var buildFlags = require("../utils/buildFlags"); + +var rootPath = path.join(__dirname, ".."); + +function printStandardLibError() { + console.log( + "[nodegit] ERROR - the latest libstdc++ is missing on your system!" 
+ ); + console.log(""); + console.log("On Ubuntu you can install it using:"); + console.log(""); + console.log("$ sudo add-apt-repository ppa:ubuntu-toolchain-r/test"); + console.log("$ sudo apt-get update"); + console.log("$ sudo apt-get install libstdc++-4.9-dev"); +} + +module.exports = function install() { + if (buildFlags.isGitRepo) { + // If we're building NodeGit from a git repo we aren't going to do any + // cleaning up + return Promise.resolve(); + } + if (buildFlags.isElectron || buildFlags.isNWjs) { + // If we're building for electron or NWjs, we're unable to require the + // built library so we have to just assume success, unfortunately. + return Promise.resolve(); + } + + return exec("node " + path.join(rootPath, "dist/nodegit.js")) + .catch(function(e) { + if (~e.toString().indexOf("Module version mismatch")) { + console.warn( + "[nodegit] WARN - NodeGit was built for a different version of node." + ); + console.warn( + "If you are building NodeGit for electron/nwjs you can " + + "ignore this warning." + ); + } + else { + throw e; + } + }) + .then(function() { + // Is we're using NodeGit from a package manager then let's clean up after + // ourselves when we install successfully. + if (!buildFlags.mustBuild) { + // We can't remove the source files yet because apparently the + // "standard workflow" for native node moduels in Electron/nwjs is to + // build them for node and then nah eff that noise let's rebuild them + // again for the actual platform! Hurray!!! When that madness is dead + // we can clean up the source which is a serious amount of data. 
+ // fse.removeSync(path.join(rootPath, "vendor")); + // fse.removeSync(path.join(rootPath, "src")); + // fse.removeSync(path.join(rootPath, "include")); + + fse.removeSync(path.join(rootPath, "build/Release/*.a")); + fse.removeSync(path.join(rootPath, "build/Release/obj.target")); + } + }); +}; + +// Called on the command line +if (require.main === module) { + module.exports() + .catch(function(e) { + console.warn("[nodegit] WARN - Could not finish postinstall"); + + if ( + process.pladtform === "linux" && + ~e.toString().indexOf("libstdc++") + ) { + printStandardLibError(); + } + else { + console.log(e); + } + }); +} diff --git a/lifecycleScripts/preinstall.js b/lifecycleScripts/preinstall.js new file mode 100644 index 000000000..4d712aa83 --- /dev/null +++ b/lifecycleScripts/preinstall.js @@ -0,0 +1,43 @@ +var path = require("path"); +var local = path.join.bind(path, __dirname); + +var exec = require(local("../utils/execPromise")); +var configure = require(local("configureLibssh2")); +var buildFlags = require(local("../utils/buildFlags")); + +module.exports = function prepareForBuild() { + console.log("[nodegit] Running pre-install script"); + + return exec("npm -v") + .then(function(npmVersion) { + if (npmVersion.split(".")[0] < 3) { + console.log("[nodegit] npm@2 installed, pre-loading required packages"); + return exec("npm install --ignore-scripts"); + } + + return Promise.resolve(); + }) + .then(function() { + return configure(); + }) + .then(function() { + if (buildFlags.isGitRepo) { + var submodules = require(local("submodules")); + var generate = require(local("../generate")); + return submodules() + .then(function() { + return generate(); + }); + } + }); +}; + +// Called on the command line +if (require.main === module) { + module.exports() + .catch(function(e) { + console.error("[nodegit] ERROR - Could not finish preinstall"); + console.error(e); + process.exit(1); + }); +} diff --git a/lifecycleScripts/submodules/getStatus.js 
b/lifecycleScripts/submodules/getStatus.js new file mode 100644 index 000000000..2fcc42d37 --- /dev/null +++ b/lifecycleScripts/submodules/getStatus.js @@ -0,0 +1,50 @@ +var path = require("path"); +var rootDir = path.join(__dirname, "../.."); +var exec = require(path.join(rootDir, "./utils/execPromise")); + +module.exports = function getStatus() { + return exec("git submodule status", { cwd: rootDir}) + .then(function(stdout) { + if (!stdout) { + // In the case where we pull from npm they pre-init the submodules for + // us and `git submodule status` returns empty-string. In that case + // we'll just assume that we're good. + return Promise.resolve([]); + } + + function getStatusPromiseFromLine(line) { + var lineSections = line.trim().split(" "); + var onNewCommit = !!~lineSections[0].indexOf("+"); + var needsInitialization = !!~lineSections[0].indexOf("-"); + var commitOid = lineSections[0].replace("+", "").replace("-", ""); + var name = lineSections[1]; + + return exec("git status", { cwd: path.join(rootDir, name)}) + .then(function(workDirStatus) { + return { + commitOid: commitOid, + onNewCommit: onNewCommit, + name: name, + needsInitialization: needsInitialization, + workDirDirty: !~workDirStatus + .trim() + .split("\n") + .pop() + .indexOf("nothing to commit") + }; + }); + } + + return Promise.all(stdout + .trim() + .split("\n") + .map(getStatusPromiseFromLine) + ); + }) + .catch(function() { + // In the case that NodeGit is required from another project via npm we + // won't be able to run submodule commands but that's ok since the + // correct version of libgit2 is published with nodegit. 
+ return Promise.resolve([]); + }); +}; diff --git a/lifecycleScripts/submodules/index.js b/lifecycleScripts/submodules/index.js new file mode 100644 index 000000000..17a2e5565 --- /dev/null +++ b/lifecycleScripts/submodules/index.js @@ -0,0 +1,77 @@ +var path = require("path"); +var rootDir = path.join(__dirname, "../.."); + +var gitExecutableLocation = require( + path.join(rootDir, "./utils/gitExecutableLocation") +); +var submoduleStatus = require("./getStatus"); + +var exec = require(path.join(rootDir, "./utils/execPromise")); + +module.exports = function submodules() { + return gitExecutableLocation() + .catch(function() { + console.error("[nodegit] ERROR - Compilation of NodeGit requires git " + + "CLI to be installed and on the path"); + + throw new Error("git CLI is not installed or not on the path"); + }) + .then(function() { + console.log("[nodegit] Checking submodule status"); + return submoduleStatus(); + }) + .then(function(statuses) { + function printSubmodule(submoduleName) { + console.log("\t" + submoduleName); + } + + var dirtySubmodules = statuses + .filter(function(status) { + return status.workDirDirty && !status.needsInitialization; + }) + .map(function(dirtySubmodule) { + return dirtySubmodule.name; + }); + + if (dirtySubmodules.length) { + console.error( + "[nodegit] ERROR - Some submodules have uncommited changes:" + ); + dirtySubmodules.forEach(printSubmodule); + console.error( + "\nThey must either be committed or discarded before we build" + ); + + throw new Error("Dirty Submodules: " + dirtySubmodules.join(" ")); + } + + var outOfSyncSubmodules = statuses + .filter(function(status) { + return status.onNewCommit && !status.needsInitialization; + }) + .map(function(outOfSyncSubmodule) { + return outOfSyncSubmodule.name; + }); + + if (outOfSyncSubmodules.length) { + console.warn( + "[nodegit] WARNING - Some submodules are pointing to an new commit:" + ); + outOfSyncSubmodules.forEach(printSubmodule); + console.warn("\nThey will not be 
updated."); + } + + return Promise.all(statuses + .filter(function(status) { + return !status.onNewCommit; + }) + .map(function(submoduleToUpdate) { + console.log("[nodegit] Initializing submodules"); + + return exec( + "git submodule update --init --recursive " + submoduleToUpdate.name + ); + }) + ); + }); +}; diff --git a/package.json b/package.json index 94687fa4e..30c2158fb 100644 --- a/package.json +++ b/package.json @@ -1,9 +1,8 @@ { "name": "nodegit", "description": "Node.js libgit2 asynchronous native bindings", - "version": "0.1.2", - "libgit2": "e953c1606d0d7aea680c9b19db0b955b34ae63c2", - "homepage": "https://github.com/tbranyen/nodegit", + "version": "0.16.0", + "homepage": "http://nodegit.org", "keywords": [ "libgit2", "git2", @@ -11,45 +10,81 @@ "native" ], "license": "MIT", - "author": "Tim Branyen (http://twitter.com/tbranyen)", + "author": "Tim Branyen (@tbranyen)", "contributors": [ { - "name": "Michael Robinson", - "email": "mike@pagesofinterest.net" + "name": "John Haley", + "email": "john@haley.io" + }, + { + "name": "Max Korp", + "email": "maxkorp@8bytealchemy.com" } ], - "main": "index.js", + "main": "dist/nodegit.js", "repository": { "type": "git", - "url": "git://github.com/tbranyen/nodegit.git" + "url": "git://github.com/nodegit/nodegit.git" }, "directories": { "build": "./build", "lib": "./lib" }, "engines": { - "node": ">= 0.8" + "node": ">= 0.12" }, "dependencies": { - "request": "~2.25.0", - "node-gyp": "~0.13.0", - "tar": "~0.1.18", - "which": "~1.0.5", - "q": "~0.9.6", - "fs-extra": "0.6.0", - "nan": "0.8.0", - "rimraf": "~2.2.6" + "fs-extra": "~0.26.2", + "lodash": "^4.13.1", + "nan": "^2.2.0", + "node-gyp": "^3.3.1", + "node-pre-gyp": "~0.6.15", + "promisify-node": "~0.3.0" }, "devDependencies": { - "jshint": "~2.4.4", - "nodeunit": "~0.8.6", - "ejs": "~1.0.0", - "async": "~0.2.10" + "aws-sdk": "^2.3.19", + "babel-cli": "^6.7.7", + "babel-preset-es2015": "^6.6.0", + "clean-for-publish": "~1.0.2", + "combyne": "~0.8.1", + 
"coveralls": "~2.11.4", + "istanbul": "~0.3.20", + "js-beautify": "~1.5.10", + "jshint": "~2.8.0", + "lcov-result-merger": "~1.0.2", + "mocha": "~2.3.4" + }, + "vendorDependencies": { + "libssh2": "1.7.0", + "http_parser": "2.5.0" + }, + "binary": { + "module_name": "nodegit", + "module_path": "./build/Release/", + "host": "https://nodegit.s3.amazonaws.com/nodegit/nodegit/" }, "scripts": { - "lint": "jshint src", - "install": "node install.js", - "test": "cd test && nodeunit nodegit.js", - "codegen": "node build/codegen/generate.js" + "babel": "babel --presets es2015 -d ./dist ./lib", + "cov": "npm run cppcov && npm run filtercov && npm run mergecov", + "coveralls": "cat ./test/coverage/merged.lcov | coveralls", + "cppcov": "mkdir -p test/coverage/cpp && ./lcov-1.10/bin/lcov --gcov-tool /usr/bin/gcov-4.9 --capture --directory build/Release/obj.target/nodegit/src --output-file test/coverage/cpp/lcov_full.info", + "filtercov": "./lcov-1.10/bin/lcov --extract test/coverage/cpp/lcov_full.info $(pwd)/src/* $(pwd)/src/**/* $(pwd)/include/* $(pwd)/include/**/* --output-file test/coverage/cpp/lcov.info && rm test/coverage/cpp/lcov_full.info", + "generateJson": "node generate/scripts/generateJson", + "generateMissingTests": "node generate/scripts/generateMissingTests", + "generateNativeCode": "node generate/scripts/generateNativeCode", + "install": "node lifecycleScripts/preinstall && node lifecycleScripts/install", + "installDebug": "BUILD_DEBUG=true npm install", + "lint": "jshint lib test/tests test/utils examples lifecycleScripts", + "mergecov": "lcov-result-merger 'test/**/*.info' 'test/coverage/merged.lcov' && ./lcov-1.10/bin/genhtml test/coverage/merged.lcov --output-directory test/coverage/report", + "mocha": "mocha test/runner test/tests --timeout 15000", + "mochaDebug": "mocha --debug-brk test/runner test/tests --timeout 15000", + "postinstall": "node lifecycleScripts/postinstall", + "prepublish": "npm run babel", + "rebuild": "node generate && npm run babel && 
node-gyp configure build", + "rebuildDebug": "node generate && npm run babel && node-gyp configure --debug build", + "recompile": "node-gyp configure build", + "recompileDebug": "node-gyp configure --debug build", + "test": "npm run lint && node --expose-gc test", + "xcodeDebug": "node-gyp configure -- -f xcode" } } diff --git a/src/base.cc b/src/base.cc deleted file mode 100755 index 940be289f..000000000 --- a/src/base.cc +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2013, Tim Branyen @tbranyen - * @author Michael Robinson @codeofinterest - * - * Dual licensed under the MIT and GPL licenses. - */ - -#include -#include - -#include "git2.h" - -#include "../include/wrapper.h" -#include "../include/reference.h" -#include "../include/signature.h" -#include "../include/time.h" -#include "../include/blob.h" -#include "../include/repo.h" -#include "../include/oid.h" -#include "../include/object.h" -#include "../include/commit.h" -#include "../include/revwalk.h" -#include "../include/tree.h" -#include "../include/tree_entry.h" -#include "../include/diff_find_options.h" -#include "../include/diff_options.h" -#include "../include/diff_list.h" -#include "../include/diff_range.h" -#include "../include/diff_file.h" -#include "../include/patch.h" -#include "../include/delta.h" -#include "../include/threads.h" -#include "../include/index.h" -#include "../include/index_entry.h" -#include "../include/index_time.h" -#include "../include/tag.h" -#include "../include/refdb.h" -#include "../include/odb_object.h" -#include "../include/odb.h" -#include "../include/submodule.h" -#include "../include/tree_builder.h" -#include "../include/remote.h" -#include "../include/clone_options.h" - -extern "C" void init(Handle target) { - NanScope(); - - Wrapper::Initialize(target); - - GitReference::Initialize(target); - GitIndex::Initialize(target); - GitIndexEntry::Initialize(target); - GitIndexTime::Initialize(target); - GitTag::Initialize(target); - GitSignature::Initialize(target); - 
GitTime::Initialize(target); - GitBlob::Initialize(target); - GitOid::Initialize(target); - GitObject::Initialize(target); - GitRepo::Initialize(target); - GitCommit::Initialize(target); - GitRevWalk::Initialize(target); - GitRefDb::Initialize(target); - GitOdb::Initialize(target); - GitOdbObject::Initialize(target); - GitSubmodule::Initialize(target); - - GitTree::Initialize(target); - GitTreeEntry::Initialize(target); - GitTreeBuilder::Initialize(target); - - GitDiffRange::Initialize(target); - GitDiffFindOptions::Initialize(target); - GitDiffOptions::Initialize(target); - GitDiffList::Initialize(target); - GitPatch::Initialize(target); - GitDiffFile::Initialize(target); - GitDelta::Initialize(target); - - GitRemote::Initialize(target); - GitCloneOptions::Initialize(target); - - GitThreads::Initialize(target); - -} - -NODE_MODULE(nodegit, init) diff --git a/src/blob.cc b/src/blob.cc deleted file mode 100755 index bc6068608..000000000 --- a/src/blob.cc +++ /dev/null @@ -1,146 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/blob.h" -#include "../include/repo.h" -#include "../include/oid.h" -#include "../include/wrapper.h" -#include "node_buffer.h" - -using namespace v8; -using namespace node; - -GitBlob::GitBlob(git_blob *raw) { - this->raw = raw; -} - -GitBlob::~GitBlob() { - git_blob_free(this->raw); -} - -void GitBlob::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Blob")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "oid", Oid); - NODE_SET_PROTOTYPE_METHOD(tpl, "content", Content); - NODE_SET_PROTOTYPE_METHOD(tpl, "size", Size); - NODE_SET_PROTOTYPE_METHOD(tpl, "isBinary", IsBinary); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Blob"), constructor_template); -} - -Handle GitBlob::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_blob is required."))); - } - - GitBlob* object = new GitBlob((git_blob *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitBlob::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitBlob::constructor_template->NewInstance(1, argv)); -} - -git_blob *GitBlob::GetValue() { - return this->raw; -} - - -/** - * @return {Oid} result - */ -Handle GitBlob::Oid(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_blob_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return 
{Wrapper} result - */ -Handle GitBlob::Content(const Arguments& args) { - HandleScope scope; - - - const void * result = git_blob_rawcontent( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - to = Wrapper::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitBlob::Size(const Arguments& args) { - HandleScope scope; - - - git_off_t result = git_blob_rawsize( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Number::New(result); - return scope.Close(to); -} - -/** - * @return {Boolean} result - */ -Handle GitBlob::IsBinary(const Arguments& args) { - HandleScope scope; - - - int result = git_blob_is_binary( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Boolean::New(result); - return scope.Close(to); -} - -Persistent GitBlob::constructor_template; diff --git a/src/branch.cc b/src/branch.cc deleted file mode 100644 index 043afe60a..000000000 --- a/src/branch.cc +++ /dev/null @@ -1,536 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/branch.h" - -using namespace v8; -using namespace node; - -Branch::Branch(git_branch *raw) { - this->raw = raw; -} - -Branch::~Branch() { - git_branch_free(this->raw); -} - -void Branch::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Branch")); - - NODE_SET_METHOD(tpl, "create", Create); - NODE_SET_METHOD(tpl, "delete", Delete); - NODE_SET_METHOD(tpl, "foreach", Foreach); - NODE_SET_METHOD(tpl, "move", Move); - NODE_SET_METHOD(tpl, "lookup", Lookup); - NODE_SET_METHOD(tpl, "name", Name); - NODE_SET_METHOD(tpl, "upstream", Upstream); - NODE_SET_METHOD(tpl, "setUpstream", SetUpstream); - NODE_SET_METHOD(tpl, "upstreamName", UpstreamName); - NODE_SET_METHOD(tpl, "isHead", IsHead); - NODE_SET_METHOD(tpl, "remoteName", RemoteName); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Branch"), constructor_template); -} - -Handle Branch::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_branch is required."))); - } - - Branch* object = new Branch((git_branch *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle Branch::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(Branch::constructor_template->NewInstance(1, argv)); -} - -git_branch *Branch::GetValue() { - return this->raw; -} - - -/** - * @param {Repository} repo - * @param {String} branch_name - * @param {Commit} target - * @param {Number} force - * @return {Reference} out - */ -Handle Branch::Create(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || 
!args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 1 || !args[1]->IsString()) { - return ThrowException(Exception::Error(String::New("String branch_name is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - return ThrowException(Exception::Error(String::New("Commit target is required."))); - } - if (args.Length() == 3 || !args[3]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number force is required."))); - } - - git_reference * out = 0; - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - const char * from_branch_name; - String::Utf8Value branch_name(args[1]->ToString()); - from_branch_name = strdup(*branch_name); - const git_commit * from_target; - from_target = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - int from_force; - from_force = (int) args[3]->ToInt32()->Value(); - - int result = git_branch_create( - &out - , from_repo - , from_branch_name - , from_target - , from_force - ); - free((void *)from_branch_name); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitReference::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {Reference} branch - */ -Handle Branch::Delete(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Reference branch is required."))); - } - - git_reference * from_branch; - from_branch = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - int result = git_branch_delete( - from_branch - ); - if (result != GIT_OK) { - if (giterr_last()) { - return 
ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {Repository} repo - * @param {Number} list_flags - * @param {BranchForeachCb} branch_cb - * @param {void} payload - */ -Handle Branch::Foreach(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 1 || !args[1]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number list_flags is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - return ThrowException(Exception::Error(String::New("BranchForeachCb branch_cb is required."))); - } - if (args.Length() == 3 || !args[3]->IsObject()) { - return ThrowException(Exception::Error(String::New("void payload is required."))); - } - - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - unsigned int from_list_flags; - from_list_flags = (unsigned int) args[1]->ToUint32()->Value(); - git_branch_foreach_cb from_branch_cb; - from_branch_cb = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - void * from_payload; - from_payload = ObjectWrap::Unwrap(args[3]->ToObject())->GetValue(); - - int result = git_branch_foreach( - from_repo - , from_list_flags - , from_branch_cb - , from_payload - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {Reference} branch - * @param {String} new_branch_name - * @param {Number} force - * @return {Reference} out - */ -Handle Branch::Move(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return 
ThrowException(Exception::Error(String::New("Reference branch is required."))); - } - if (args.Length() == 1 || !args[1]->IsString()) { - return ThrowException(Exception::Error(String::New("String new_branch_name is required."))); - } - if (args.Length() == 2 || !args[2]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number force is required."))); - } - - git_reference * out = 0; - git_reference * from_branch; - from_branch = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - const char * from_new_branch_name; - String::Utf8Value new_branch_name(args[1]->ToString()); - from_new_branch_name = strdup(*new_branch_name); - int from_force; - from_force = (int) args[2]->ToInt32()->Value(); - - int result = git_branch_move( - &out - , from_branch - , from_new_branch_name - , from_force - ); - free((void *)from_new_branch_name); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitReference::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {Repository} repo - * @param {String} branch_name - * @param {BranchT} branch_type - * @return {Reference} out - */ -Handle Branch::Lookup(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 1 || !args[1]->IsString()) { - return ThrowException(Exception::Error(String::New("String branch_name is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - return ThrowException(Exception::Error(String::New("BranchT branch_type is required."))); - } - - git_reference * out = 0; - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - const char * 
from_branch_name; - String::Utf8Value branch_name(args[1]->ToString()); - from_branch_name = strdup(*branch_name); - git_branch_t from_branch_type; - from_branch_type = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - - int result = git_branch_lookup( - &out - , from_repo - , from_branch_name - , from_branch_type - ); - free((void *)from_branch_name); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitReference::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {Reference} ref - * @return {String} out - */ -Handle Branch::Name(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Reference ref is required."))); - } - - const char * out = 0; - git_reference * from_ref; - from_ref = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - int result = git_branch_name( - &out - , from_ref - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - to = String::New(out); - return scope.Close(to); -} - -/** - * @param {Reference} branch - * @return {Reference} out - */ -Handle Branch::Upstream(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Reference branch is required."))); - } - - git_reference * out = 0; - git_reference * from_branch; - from_branch = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - int result = git_branch_upstream( - &out - , from_branch - ); - if (result != GIT_OK) { - if (giterr_last()) { - return 
ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitReference::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {Reference} branch - * @param {String} upstream_name - */ -Handle Branch::SetUpstream(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Reference branch is required."))); - } - if (args.Length() == 1 || !args[1]->IsString()) { - return ThrowException(Exception::Error(String::New("String upstream_name is required."))); - } - - git_reference * from_branch; - from_branch = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - const char * from_upstream_name; - String::Utf8Value upstream_name(args[1]->ToString()); - from_upstream_name = strdup(*upstream_name); - - int result = git_branch_set_upstream( - from_branch - , from_upstream_name - ); - free((void *)from_upstream_name); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {String} tracking_branch_name_out - * @param {Number} buffer_size - * @param {Repository} repo - * @param {String} canonical_branch_name - */ -Handle Branch::UpstreamName(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String tracking_branch_name_out is required."))); - } - if (args.Length() == 1 || !args[1]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number buffer_size is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - return 
ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 3 || !args[3]->IsString()) { - return ThrowException(Exception::Error(String::New("String canonical_branch_name is required."))); - } - - char * from_tracking_branch_name_out; - String::Utf8Value tracking_branch_name_out(args[0]->ToString()); - from_tracking_branch_name_out = strdup(*tracking_branch_name_out); - size_t from_buffer_size; - from_buffer_size = (size_t) args[1]->ToUint32()->Value(); - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - const char * from_canonical_branch_name; - String::Utf8Value canonical_branch_name(args[3]->ToString()); - from_canonical_branch_name = strdup(*canonical_branch_name); - - int result = git_branch_upstream_name( - from_tracking_branch_name_out - , from_buffer_size - , from_repo - , from_canonical_branch_name - ); - free((void *)from_tracking_branch_name_out); - free((void *)from_canonical_branch_name); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {Reference} branch - */ -Handle Branch::IsHead(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Reference branch is required."))); - } - - git_reference * from_branch; - from_branch = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - int result = git_branch_is_head( - from_branch - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {String} remote_name_out - * @param {Number} buffer_size - * 
@param {Repository} repo - * @param {String} canonical_branch_name - */ -Handle Branch::RemoteName(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String remote_name_out is required."))); - } - if (args.Length() == 1 || !args[1]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number buffer_size is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 3 || !args[3]->IsString()) { - return ThrowException(Exception::Error(String::New("String canonical_branch_name is required."))); - } - - char * from_remote_name_out; - String::Utf8Value remote_name_out(args[0]->ToString()); - from_remote_name_out = strdup(*remote_name_out); - size_t from_buffer_size; - from_buffer_size = (size_t) args[1]->ToUint32()->Value(); - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - const char * from_canonical_branch_name; - String::Utf8Value canonical_branch_name(args[3]->ToString()); - from_canonical_branch_name = strdup(*canonical_branch_name); - - int result = git_branch_remote_name( - from_remote_name_out - , from_buffer_size - , from_repo - , from_canonical_branch_name - ); - free((void *)from_remote_name_out); - free((void *)from_canonical_branch_name); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -Persistent Branch::constructor_template; diff --git a/src/clone_options.cc b/src/clone_options.cc deleted file mode 100644 index 3f4b058c5..000000000 --- a/src/clone_options.cc +++ /dev/null @@ -1,63 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not 
modify! - **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/clone_options.h" - -using namespace v8; -using namespace node; - -GitCloneOptions::GitCloneOptions(git_clone_options *raw) { - this->raw = raw; -} - -GitCloneOptions::~GitCloneOptions() { - free(this->raw); -} - -void GitCloneOptions::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("CloneOptions")); - - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("CloneOptions"), constructor_template); -} - -Handle GitCloneOptions::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_clone_options is required."))); - } - - GitCloneOptions* object = new GitCloneOptions((git_clone_options *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitCloneOptions::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitCloneOptions::constructor_template->NewInstance(1, argv)); -} - -git_clone_options *GitCloneOptions::GetValue() { - return this->raw; -} - - -Persistent GitCloneOptions::constructor_template; diff --git a/src/commit.cc b/src/commit.cc deleted file mode 100755 index 29a1a7877..000000000 --- a/src/commit.cc +++ /dev/null @@ -1,316 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/commit.h" -#include "../include/oid.h" -#include "../include/repo.h" -#include "../include/signature.h" -#include "../include/tree.h" - -using namespace v8; -using namespace node; - -GitCommit::GitCommit(git_commit *raw) { - this->raw = raw; -} - -GitCommit::~GitCommit() { - git_commit_free(this->raw); -} - -void GitCommit::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Commit")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "oid", Oid); - NODE_SET_PROTOTYPE_METHOD(tpl, "messageEncoding", MessageEncoding); - NODE_SET_PROTOTYPE_METHOD(tpl, "message", Message); - NODE_SET_PROTOTYPE_METHOD(tpl, "time", Time); - NODE_SET_PROTOTYPE_METHOD(tpl, "offset", Offset); - NODE_SET_PROTOTYPE_METHOD(tpl, "committer", Committer); - NODE_SET_PROTOTYPE_METHOD(tpl, "author", Author); - NODE_SET_PROTOTYPE_METHOD(tpl, "treeId", TreeId); - NODE_SET_PROTOTYPE_METHOD(tpl, "parentCount", ParentCount); - NODE_SET_PROTOTYPE_METHOD(tpl, "parentId", ParentId); - NODE_SET_PROTOTYPE_METHOD(tpl, "nthGenAncestor", NthGenAncestor); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Commit"), constructor_template); -} - -Handle GitCommit::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_commit is required."))); - } - - GitCommit* object = new GitCommit((git_commit *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitCommit::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitCommit::constructor_template->NewInstance(1, argv)); -} - -git_commit *GitCommit::GetValue() { - 
return this->raw; -} - - -/** - * @return {Oid} result - */ -Handle GitCommit::Oid(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_commit_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitCommit::MessageEncoding(const Arguments& args) { - HandleScope scope; - - - const char * result = git_commit_message_encoding( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitCommit::Message(const Arguments& args) { - HandleScope scope; - - - const char * result = git_commit_message( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitCommit::Time(const Arguments& args) { - HandleScope scope; - - - git_time_t result = git_commit_time( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Number::New(result); - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitCommit::Offset(const Arguments& args) { - HandleScope scope; - - - int result = git_commit_time_offset( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Integer::New(result); - return scope.Close(to); -} - -/** - * @return {Signature} result - */ -Handle GitCommit::Committer(const Arguments& args) { - HandleScope scope; - - - const git_signature * result = git_commit_committer( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_signature * )git_signature_dup(result); - } - if (result != NULL) { - to = GitSignature::New((void *)result); - } else { - to = 
Null(); - } - return scope.Close(to); -} - -/** - * @return {Signature} result - */ -Handle GitCommit::Author(const Arguments& args) { - HandleScope scope; - - - const git_signature * result = git_commit_author( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_signature * )git_signature_dup(result); - } - if (result != NULL) { - to = GitSignature::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {Oid} result - */ -Handle GitCommit::TreeId(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_commit_tree_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitCommit::ParentCount(const Arguments& args) { - HandleScope scope; - - - unsigned int result = git_commit_parentcount( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Uint32::New(result); - return scope.Close(to); -} - -/** - * @param {Number} n - * @return {Oid} result - */ -Handle GitCommit::ParentId(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number n is required."))); - } - - unsigned int from_n; - from_n = (unsigned int) args[0]->ToUint32()->Value(); - - const git_oid * result = git_commit_parent_id( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_n - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {Number} n - * @return {Commit} ancestor - */ -Handle GitCommit::NthGenAncestor(const Arguments& 
args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number n is required."))); - } - - git_commit * ancestor = 0; - unsigned int from_n; - from_n = (unsigned int) args[0]->ToUint32()->Value(); - - int result = git_commit_nth_gen_ancestor( - &ancestor - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_n - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (ancestor != NULL) { - to = GitCommit::New((void *)ancestor); - } else { - to = Null(); - } - return scope.Close(to); -} - -Persistent GitCommit::constructor_template; diff --git a/src/delta.cc b/src/delta.cc deleted file mode 100644 index 7b45d35f9..000000000 --- a/src/delta.cc +++ /dev/null @@ -1,138 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/delta.h" -#include "../include/diff_file.h" - -using namespace v8; -using namespace node; - -GitDelta::GitDelta(git_diff_delta *raw) { - this->raw = raw; -} - -GitDelta::~GitDelta() { - free(this->raw); -} - -void GitDelta::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Delta")); - - - NODE_SET_PROTOTYPE_METHOD(tpl, "oldFile", OldFile); - NODE_SET_PROTOTYPE_METHOD(tpl, "newFile", NewFile); - NODE_SET_PROTOTYPE_METHOD(tpl, "status", Status); - NODE_SET_PROTOTYPE_METHOD(tpl, "similarity", Similarity); - NODE_SET_PROTOTYPE_METHOD(tpl, "flags", Flags); - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Delta"), constructor_template); -} - -Handle GitDelta::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_diff_delta is required."))); - } - - GitDelta* object = new GitDelta((git_diff_delta *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitDelta::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitDelta::constructor_template->NewInstance(1, argv)); -} - -git_diff_delta *GitDelta::GetValue() { - return this->raw; -} - - -Handle GitDelta::OldFile(const Arguments& args) { - HandleScope scope; - Handle to; - - git_diff_file *old_file = - &ObjectWrap::Unwrap(args.This())->GetValue()->old_file; - - if (old_file != NULL) { - old_file = (git_diff_file *)git_diff_file_dup(old_file); - } - if (old_file != NULL) { - to = GitDiffFile::New((void *)old_file); - } else { - to = Null(); - } - return scope.Close(to); -} - -Handle 
GitDelta::NewFile(const Arguments& args) { - HandleScope scope; - Handle to; - - git_diff_file *new_file = - &ObjectWrap::Unwrap(args.This())->GetValue()->new_file; - - if (new_file != NULL) { - new_file = (git_diff_file *)git_diff_file_dup(new_file); - } - if (new_file != NULL) { - to = GitDiffFile::New((void *)new_file); - } else { - to = Null(); - } - return scope.Close(to); -} - -Handle GitDelta::Status(const Arguments& args) { - HandleScope scope; - Handle to; - - git_delta_t status = - ObjectWrap::Unwrap(args.This())->GetValue()->status; - - to = Integer::New(status); - return scope.Close(to); -} - -Handle GitDelta::Similarity(const Arguments& args) { - HandleScope scope; - Handle to; - - uint32_t similarity = - ObjectWrap::Unwrap(args.This())->GetValue()->similarity; - - to = Integer::New(similarity); - return scope.Close(to); -} - -Handle GitDelta::Flags(const Arguments& args) { - HandleScope scope; - Handle to; - - uint32_t flags = - ObjectWrap::Unwrap(args.This())->GetValue()->flags; - - to = Integer::New(flags); - return scope.Close(to); -} - -Persistent GitDelta::constructor_template; diff --git a/src/diff.cc b/src/diff.cc deleted file mode 100644 index 70fad85b6..000000000 --- a/src/diff.cc +++ /dev/null @@ -1,518 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/diff.h" - -#include "../include/functions/utilities.h" -#include "../include/functions/string.h" - -using namespace v8; -using namespace node; - -GitDiff::GitDiff(git_diff_list *raw) { - this->raw = raw; -} - -GitDiff::~GitDiff() { - git_diff_list_free(this->raw); -} - -void GitDiff::Initialize(Handle target) { - NanScope(); - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(NanSymbol("Diff")); - - NODE_SET_METHOD(tpl, "treeToTree", TreeToTree); - NODE_SET_METHOD(tpl, "treeToIndex", TreeToIndex); - NODE_SET_METHOD(tpl, "indexToWorkdir", IndexToWorkdir); - NODE_SET_METHOD(tpl, "treeToWorkdir", TreeToWorkdir); - NODE_SET_PROTOTYPE_METHOD(tpl, "merge", Merge); - NODE_SET_PROTOTYPE_METHOD(tpl, "findSimilar", FindSimilar); - NODE_SET_METHOD(tpl, "statusChar", StatusChar); - NODE_SET_PROTOTYPE_METHOD(tpl, "numDeltas", NumDeltas); - NODE_SET_METHOD(tpl, "numDeltasOfType", NumDeltasOfType); - NODE_SET_PROTOTYPE_METHOD(tpl, "getPatch", GetPatch); - - NanAssignPersistent(FunctionTemplate, constructor_template, tpl); - target->Set(String::NewSymbol("Diff"), tpl->GetFunction()); -} - -Handle GitDiff::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_diff_list is required."))); - } - - GitDiff* object = new GitDiff((git_diff_list *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -git_diff_list *GitDiff::GetValue() { - return this->raw; -} - - -Handle GitDiff::TreeToTree(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Tree 
old_tree is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - return ThrowException(Exception::Error(String::New("Tree new_tree is required."))); - } - if (args.Length() == 3 || !args[3]->IsObject()) { - return ThrowException(Exception::Error(String::New("DiffOptions opts is required."))); - } - if (args.Length() == 4 || !args[4]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - TreeToTreeBaton* baton = new TreeToTreeBaton; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args[0]); - baton->repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->old_treeReference = Persistent::New(args[1]); - baton->old_tree = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - baton->new_treeReference = Persistent::New(args[2]); - baton->new_tree = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - baton->optsReference = Persistent::New(args[3]); - baton->opts = ObjectWrap::Unwrap(args[3]->ToObject())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[4])); - - uv_queue_work(uv_default_loop(), &baton->request, TreeToTreeWork, (uv_after_work_cb)TreeToTreeAfterWork); - - return Undefined(); -} - -void GitDiff::TreeToTreeWork(uv_work_t *req) { - TreeToTreeBaton *baton = static_cast(req->data); - int diff = git_diff_tree_to_tree( - &baton->diff, - baton->repo, - baton->old_tree, - baton->new_tree, - baton->opts - ); - if (diff != GIT_OK) { - baton->error = giterr_last(); - } -} - -void GitDiff::TreeToTreeAfterWork(uv_work_t *req) { - HandleScope scope; - TreeToTreeBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (!baton->error) { - Handle argv[1] = { External::New(baton->diff) }; - Handle diff = GitDiffList::constructor_template->NewInstance(1, argv); - Handle argv2[2] = { - Local::New(Null()), - diff - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv2); - } else { - 
Handle argv2[1] = { - GitError::WrapError(baton->error) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv2); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->old_treeReference.Dispose(); - baton->new_treeReference.Dispose(); - baton->optsReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -Handle GitDiff::TreeToIndex(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Tree old_tree is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - return ThrowException(Exception::Error(String::New("Index index is required."))); - } - if (args.Length() == 3 || !args[3]->IsObject()) { - return ThrowException(Exception::Error(String::New("DiffOptions opts is required."))); - } - if (args.Length() == 4 || !args[4]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - TreeToIndexBaton* baton = new TreeToIndexBaton; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args[0]); - baton->repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->old_treeReference = Persistent::New(args[1]); - baton->old_tree = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - baton->indexReference = Persistent::New(args[2]); - baton->index = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - baton->optsReference = Persistent::New(args[3]); - baton->opts = ObjectWrap::Unwrap(args[3]->ToObject())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[4])); - - uv_queue_work(uv_default_loop(), &baton->request, TreeToIndexWork, (uv_after_work_cb)TreeToIndexAfterWork); - - return Undefined(); 
-} - -void GitDiff::TreeToIndexWork(uv_work_t *req) { - TreeToIndexBaton *baton = static_cast(req->data); - int diff = git_diff_tree_to_index( - &baton->diff, - baton->repo, - baton->old_tree, - baton->index, - baton->opts - ); - if (diff != GIT_OK) { - baton->error = giterr_last(); - } -} - -void GitDiff::TreeToIndexAfterWork(uv_work_t *req) { - HandleScope scope; - TreeToIndexBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (!baton->error) { - Handle argv[1] = { External::New(baton->diff) }; - Handle diff = GitDiffList::constructor_template->NewInstance(1, argv); - Handle argv2[2] = { - Local::New(Null()), - diff - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv2); - } else { - Handle argv2[1] = { - GitError::WrapError(baton->error) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv2); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->old_treeReference.Dispose(); - baton->indexReference.Dispose(); - baton->optsReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -Handle GitDiff::IndexToWorkdir(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Index index is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - return ThrowException(Exception::Error(String::New("DiffOptions opts is required."))); - } - if (args.Length() == 3 || !args[3]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - IndexToWorkdirBaton* baton = new IndexToWorkdirBaton; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args[0]); - baton->repo = 
ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->indexReference = Persistent::New(args[1]); - baton->index = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - baton->optsReference = Persistent::New(args[2]); - baton->opts = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[3])); - - uv_queue_work(uv_default_loop(), &baton->request, IndexToWorkdirWork, (uv_after_work_cb)IndexToWorkdirAfterWork); - - return Undefined(); -} - -void GitDiff::IndexToWorkdirWork(uv_work_t *req) { - IndexToWorkdirBaton *baton = static_cast(req->data); - int diff = git_diff_index_to_workdir( - &baton->diff, - baton->repo, - baton->index, - baton->opts - ); - if (diff != GIT_OK) { - baton->error = giterr_last(); - } -} - -void GitDiff::IndexToWorkdirAfterWork(uv_work_t *req) { - HandleScope scope; - IndexToWorkdirBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (!baton->error) { - Handle argv[1] = { External::New(baton->diff) }; - Handle diff = GitDiffList::constructor_template->NewInstance(1, argv); - Handle argv2[2] = { - Local::New(Null()), - diff - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv2); - } else { - Handle argv2[1] = { - GitError::WrapError(baton->error) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv2); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->indexReference.Dispose(); - baton->optsReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -Handle GitDiff::TreeToWorkdir(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Tree old_tree is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - 
return ThrowException(Exception::Error(String::New("DiffOptions opts is required."))); - } - if (args.Length() == 3 || !args[3]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - TreeToWorkdirBaton* baton = new TreeToWorkdirBaton; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args[0]); - baton->repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->old_treeReference = Persistent::New(args[1]); - baton->old_tree = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - baton->optsReference = Persistent::New(args[2]); - baton->opts = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[3])); - - uv_queue_work(uv_default_loop(), &baton->request, TreeToWorkdirWork, (uv_after_work_cb)TreeToWorkdirAfterWork); - - return Undefined(); -} - -void GitDiff::TreeToWorkdirWork(uv_work_t *req) { - TreeToWorkdirBaton *baton = static_cast(req->data); - int diff = git_diff_tree_to_workdir( - &baton->diff, - baton->repo, - baton->old_tree, - baton->opts - ); - if (diff != GIT_OK) { - baton->error = giterr_last(); - } -} - -void GitDiff::TreeToWorkdirAfterWork(uv_work_t *req) { - HandleScope scope; - TreeToWorkdirBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (!baton->error) { - Handle argv[1] = { External::New(baton->diff) }; - Handle diff = GitDiffList::constructor_template->NewInstance(1, argv); - Handle argv2[2] = { - Local::New(Null()), - diff - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv2); - } else { - Handle argv2[1] = { - GitError::WrapError(baton->error) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv2); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->old_treeReference.Dispose(); - baton->optsReference.Dispose(); - baton->callback.Dispose(); - 
delete baton; -} - -Handle GitDiff::Merge(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("DiffList from is required."))); - } - - int result = git_diff_merge( - - - ObjectWrap::Unwrap(args.This())->GetValue() -, - - ObjectWrap::Unwrap(args[0]->ToObject())->GetValue() - ); - - if (result != GIT_OK) { - return ThrowException(GitError::WrapError(giterr_last())); - } - - return scope.Close(Int32::New(result)); -} - -Handle GitDiff::FindSimilar(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("DiffFindOptions options is required."))); - } - - int result = git_diff_find_similar( - - - ObjectWrap::Unwrap(args.This())->GetValue() -, - - ObjectWrap::Unwrap(args[0]->ToObject())->GetValue() - ); - - if (result != GIT_OK) { - return ThrowException(GitError::WrapError(giterr_last())); - } - - return scope.Close(Int32::New(result)); -} - -Handle GitDiff::StatusChar(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number status is required."))); - } - - char result = git_diff_status_char( - - - (git_delta_t) args[0]->ToInt32()->Value() - ); - - - return scope.Close(String::New(result)); -} - -Handle GitDiff::NumDeltas(const Arguments& args) { - HandleScope scope; - - size_t result = git_diff_num_deltas( - - - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - - return scope.Close(Uint32::New(result)); -} - -Handle GitDiff::NumDeltasOfType(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("DiffList diff is required."))); - } - - if (args.Length() == 1 || !args[1]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number type is required."))); - } - - 
size_t result = git_diff_num_deltas_of_type( - - - ObjectWrap::Unwrap(args[0]->ToObject())->GetValue() -, - - (git_delta_t) args[1]->ToInt32()->Value() - ); - - - return scope.Close(Uint32::New(result)); -} - -Handle GitDiff::GetPatch(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("DiffDelta delta_out is required."))); - } - - if (args.Length() == 1 || !args[1]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number idx is required."))); - } - git_diff_patch * patch_out; - - int result = git_diff_get_patch( - -& - patch_out -, -& - ObjectWrap::Unwrap(args[0]->ToObject())->GetValue() -, - - ObjectWrap::Unwrap(args.This())->GetValue() -, - - (size_t) args[1]->ToUint32()->Value() - ); - - if (result != GIT_OK) { - return ThrowException(GitError::WrapError(giterr_last())); - } - - // XXX need to copy object? - Handle argv[1] = { External::New((void *)patch_out) }; - return scope.Close(DiffPatch::constructor_template->NewInstance(1, argv)); -} - - -Persistent GitDiff::constructor_template; diff --git a/src/diff_file.cc b/src/diff_file.cc deleted file mode 100644 index c96d4c8be..000000000 --- a/src/diff_file.cc +++ /dev/null @@ -1,131 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/diff_file.h" -#include "../include/oid.h" - -using namespace v8; -using namespace node; - -GitDiffFile::GitDiffFile(git_diff_file *raw) { - this->raw = raw; -} - -GitDiffFile::~GitDiffFile() { - free(this->raw); -} - -void GitDiffFile::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("DiffFile")); - - - NODE_SET_PROTOTYPE_METHOD(tpl, "oid", Oid); - NODE_SET_PROTOTYPE_METHOD(tpl, "path", Path); - NODE_SET_PROTOTYPE_METHOD(tpl, "size", Size); - NODE_SET_PROTOTYPE_METHOD(tpl, "flags", Flags); - NODE_SET_PROTOTYPE_METHOD(tpl, "mode", Mode); - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("DiffFile"), constructor_template); -} - -Handle GitDiffFile::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_diff_file is required."))); - } - - GitDiffFile* object = new GitDiffFile((git_diff_file *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitDiffFile::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitDiffFile::constructor_template->NewInstance(1, argv)); -} - -git_diff_file *GitDiffFile::GetValue() { - return this->raw; -} - - -Handle GitDiffFile::Oid(const Arguments& args) { - HandleScope scope; - Handle to; - - git_oid *oid = - &ObjectWrap::Unwrap(args.This())->GetValue()->oid; - - if (oid != NULL) { - oid = (git_oid *)git_oid_dup(oid); - } - if (oid != NULL) { - to = GitOid::New((void *)oid); - } else { - to = Null(); - } - return scope.Close(to); -} - -Handle GitDiffFile::Path(const Arguments& args) { - HandleScope scope; - Handle to; 
- - const char * path = - ObjectWrap::Unwrap(args.This())->GetValue()->path; - - to = String::New(path); - return scope.Close(to); -} - -Handle GitDiffFile::Size(const Arguments& args) { - HandleScope scope; - Handle to; - - git_off_t size = - ObjectWrap::Unwrap(args.This())->GetValue()->size; - - to = Integer::New(size); - return scope.Close(to); -} - -Handle GitDiffFile::Flags(const Arguments& args) { - HandleScope scope; - Handle to; - - uint32_t flags = - ObjectWrap::Unwrap(args.This())->GetValue()->flags; - - to = Integer::New(flags); - return scope.Close(to); -} - -Handle GitDiffFile::Mode(const Arguments& args) { - HandleScope scope; - Handle to; - - uint16_t mode = - ObjectWrap::Unwrap(args.This())->GetValue()->mode; - - to = Integer::New(mode); - return scope.Close(to); -} - -Persistent GitDiffFile::constructor_template; diff --git a/src/diff_find_options.cc b/src/diff_find_options.cc deleted file mode 100644 index f6fd85756..000000000 --- a/src/diff_find_options.cc +++ /dev/null @@ -1,63 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/diff_find_options.h" - -using namespace v8; -using namespace node; - -GitDiffFindOptions::GitDiffFindOptions(git_diff_find_options *raw) { - this->raw = raw; -} - -GitDiffFindOptions::~GitDiffFindOptions() { - free(this->raw); -} - -void GitDiffFindOptions::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("DiffFindOptions")); - - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("DiffFindOptions"), constructor_template); -} - -Handle GitDiffFindOptions::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_diff_find_options is required."))); - } - - GitDiffFindOptions* object = new GitDiffFindOptions((git_diff_find_options *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitDiffFindOptions::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitDiffFindOptions::constructor_template->NewInstance(1, argv)); -} - -git_diff_find_options *GitDiffFindOptions::GetValue() { - return this->raw; -} - - -Persistent GitDiffFindOptions::constructor_template; diff --git a/src/diff_list.cc b/src/diff_list.cc deleted file mode 100755 index fbd7e1398..000000000 --- a/src/diff_list.cc +++ /dev/null @@ -1,226 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/diff_list.h" -#include "../include/diff_options.h" -#include "../include/diff_find_options.h" -#include "../include/repo.h" -#include "../include/tree.h" -#include "../include/index.h" -#include "../include/patch.h" -#include "../include/delta.h" - -using namespace v8; -using namespace node; - -GitDiffList::GitDiffList(git_diff_list *raw) { - this->raw = raw; -} - -GitDiffList::~GitDiffList() { - git_diff_list_free(this->raw); -} - -void GitDiffList::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("DiffList")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "merge", Merge); - NODE_SET_PROTOTYPE_METHOD(tpl, "findSimilar", FindSimilar); - NODE_SET_PROTOTYPE_METHOD(tpl, "size", Size); - NODE_SET_METHOD(tpl, "numDeltasOfType", NumDeltasOfType); - NODE_SET_PROTOTYPE_METHOD(tpl, "patch", Patch); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("DiffList"), constructor_template); -} - -Handle GitDiffList::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_diff_list is required."))); - } - - GitDiffList* object = new GitDiffList((git_diff_list *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitDiffList::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitDiffList::constructor_template->NewInstance(1, argv)); -} - -git_diff_list *GitDiffList::GetValue() { - return this->raw; -} - - -/** - * @param {DiffList} from - */ -Handle GitDiffList::Merge(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return 
ThrowException(Exception::Error(String::New("DiffList from is required."))); - } - - const git_diff_list * from_from; - from_from = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - int result = git_diff_merge( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_from - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {DiffFindOptions} options - */ -Handle GitDiffList::FindSimilar(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("DiffFindOptions options is required."))); - } - - git_diff_find_options * from_options; - from_options = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - int result = git_diff_find_similar( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_options - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @return {Number} result - */ -Handle GitDiffList::Size(const Arguments& args) { - HandleScope scope; - - - size_t result = git_diff_num_deltas( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Uint32::New(result); - return scope.Close(to); -} - -/** - * @param {DiffList} diff - * @param {Number} type - * @return {Number} result - */ -Handle GitDiffList::NumDeltasOfType(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("DiffList diff is required."))); - } - if (args.Length() == 1 || !args[1]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number type is 
required."))); - } - - git_diff_list * from_diff; - from_diff = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - git_delta_t from_type; - from_type = (git_delta_t) args[1]->ToInt32()->Value(); - - size_t result = git_diff_num_deltas_of_type( - from_diff - , from_type - ); - - Handle to; - to = Uint32::New(result); - return scope.Close(to); -} - -/** - * @param {Number} idx - * @return {Patch} patch_out - * @return {Delta} delta_out - */ -Handle GitDiffList::Patch(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number idx is required."))); - } - - git_diff_patch * patch_out = 0; - const git_diff_delta * delta_out = 0; - size_t from_idx; - from_idx = (size_t) args[0]->ToUint32()->Value(); - - int result = git_diff_get_patch( - &patch_out - , &delta_out - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_idx - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle toReturn = Object::New(); - Handle to; - if (patch_out != NULL) { - to = GitPatch::New((void *)patch_out); - } else { - to = Null(); - } - toReturn->Set(String::NewSymbol("patch"), to); - - if (delta_out != NULL) { - delta_out = (const git_diff_delta * )git_diff_delta_dup(delta_out); - } - if (delta_out != NULL) { - to = GitDelta::New((void *)delta_out); - } else { - to = Null(); - } - toReturn->Set(String::NewSymbol("delta"), to); - - return scope.Close(toReturn); -} - -Persistent GitDiffList::constructor_template; diff --git a/src/diff_options.cc b/src/diff_options.cc deleted file mode 100644 index dad3fd108..000000000 --- a/src/diff_options.cc +++ /dev/null @@ -1,63 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/diff_options.h" - -using namespace v8; -using namespace node; - -GitDiffOptions::GitDiffOptions(git_diff_options *raw) { - this->raw = raw; -} - -GitDiffOptions::~GitDiffOptions() { - free(this->raw); -} - -void GitDiffOptions::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("DiffOptions")); - - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("DiffOptions"), constructor_template); -} - -Handle GitDiffOptions::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_diff_options is required."))); - } - - GitDiffOptions* object = new GitDiffOptions((git_diff_options *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitDiffOptions::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitDiffOptions::constructor_template->NewInstance(1, argv)); -} - -git_diff_options *GitDiffOptions::GetValue() { - return this->raw; -} - - -Persistent GitDiffOptions::constructor_template; diff --git a/src/diff_range.cc b/src/diff_range.cc deleted file mode 100644 index cacbd49b5..000000000 --- a/src/diff_range.cc +++ /dev/null @@ -1,111 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/diff_range.h" - -using namespace v8; -using namespace node; - -GitDiffRange::GitDiffRange(git_diff_range *raw) { - this->raw = raw; -} - -GitDiffRange::~GitDiffRange() { - free(this->raw); -} - -void GitDiffRange::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("DiffRange")); - - - NODE_SET_PROTOTYPE_METHOD(tpl, "oldStart", OldStart); - NODE_SET_PROTOTYPE_METHOD(tpl, "oldLines", OldLines); - NODE_SET_PROTOTYPE_METHOD(tpl, "newStart", NewStart); - NODE_SET_PROTOTYPE_METHOD(tpl, "newLines", NewLines); - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("DiffRange"), constructor_template); -} - -Handle GitDiffRange::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_diff_range is required."))); - } - - GitDiffRange* object = new GitDiffRange((git_diff_range *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitDiffRange::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitDiffRange::constructor_template->NewInstance(1, argv)); -} - -git_diff_range *GitDiffRange::GetValue() { - return this->raw; -} - - -Handle GitDiffRange::OldStart(const Arguments& args) { - HandleScope scope; - Handle to; - - int old_start = - ObjectWrap::Unwrap(args.This())->GetValue()->old_start; - - to = Integer::New(old_start); - return scope.Close(to); -} - -Handle GitDiffRange::OldLines(const Arguments& args) { - HandleScope scope; - Handle to; - - int old_lines = - ObjectWrap::Unwrap(args.This())->GetValue()->old_lines; - - to = Integer::New(old_lines); - return 
scope.Close(to); -} - -Handle GitDiffRange::NewStart(const Arguments& args) { - HandleScope scope; - Handle to; - - int new_start = - ObjectWrap::Unwrap(args.This())->GetValue()->new_start; - - to = Integer::New(new_start); - return scope.Close(to); -} - -Handle GitDiffRange::NewLines(const Arguments& args) { - HandleScope scope; - Handle to; - - int new_lines = - ObjectWrap::Unwrap(args.This())->GetValue()->new_lines; - - to = Integer::New(new_lines); - return scope.Close(to); -} - -Persistent GitDiffRange::constructor_template; diff --git a/src/functions/copy.cc b/src/functions/copy.cc deleted file mode 100644 index d534c3da0..000000000 --- a/src/functions/copy.cc +++ /dev/null @@ -1,53 +0,0 @@ -#include -#include - -#include "git2.h" - -const git_error *git_error_dup(const git_error *arg) { - git_error *result = (git_error *)malloc(sizeof(git_error)); - result->klass = arg->klass; - result->message = strdup(arg->message); - return result; -} - -const git_oid *git_oid_dup(const git_oid *arg) { - git_oid *result = (git_oid *)malloc(sizeof(git_oid)); - git_oid_cpy(result, arg); - return result; -} - -const git_index_entry *git_index_entry_dup(const git_index_entry *arg) { - git_index_entry *result = (git_index_entry *)malloc(sizeof(git_index_entry)); - *result = *arg; - return result; -} - -const git_index_time *git_index_time_dup(const git_index_time *arg) { - git_index_time *result = (git_index_time *)malloc(sizeof(git_index_time)); - *result = (const git_index_time) *arg; - return result; -} - -const git_time *git_time_dup(const git_time *arg) { - git_time *result = (git_time *)malloc(sizeof(git_time)); - *result = *arg; - return result; -} - -const git_diff_delta *git_diff_delta_dup(const git_diff_delta *arg) { - git_diff_delta *result = (git_diff_delta *)malloc(sizeof(git_diff_delta)); - *result = *arg; - return result; -} - -const git_diff_file *git_diff_file_dup(const git_diff_file *arg) { - git_diff_file *result = (git_diff_file 
*)malloc(sizeof(git_diff_file)); - *result = *arg; - return result; -} - -const git_diff_range *git_diff_range_dup(const git_diff_range *arg) { - git_diff_range *result = (git_diff_range *)malloc(sizeof(git_diff_range)); - *result = *arg; - return result; -} diff --git a/src/index.cc b/src/index.cc deleted file mode 100644 index adeaf91be..000000000 --- a/src/index.cc +++ /dev/null @@ -1,898 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/index.h" -#include "../include/oid.h" -#include "../include/repo.h" -#include "../include/tree.h" -#include "../include/diff_list.h" -#include "../include/diff_options.h" -#include "../include/index_entry.h" - -using namespace v8; -using namespace node; - -GitIndex::GitIndex(git_index *raw) { - this->raw = raw; -} - -GitIndex::~GitIndex() { - git_index_free(this->raw); -} - -void GitIndex::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Index")); - - NODE_SET_METHOD(tpl, "open", Open); - NODE_SET_PROTOTYPE_METHOD(tpl, "read", Read); - NODE_SET_PROTOTYPE_METHOD(tpl, "write", Write); - NODE_SET_PROTOTYPE_METHOD(tpl, "readTree", ReadTree); - NODE_SET_PROTOTYPE_METHOD(tpl, "writeTree", WriteTree); - NODE_SET_PROTOTYPE_METHOD(tpl, "size", Size); - NODE_SET_PROTOTYPE_METHOD(tpl, "clear", Clear); - NODE_SET_PROTOTYPE_METHOD(tpl, "entry", Entry); - NODE_SET_PROTOTYPE_METHOD(tpl, "remove", Remove); - NODE_SET_PROTOTYPE_METHOD(tpl, "removeDirectory", RemoveDirectory); - NODE_SET_PROTOTYPE_METHOD(tpl, "addByPath", AddBypath); - NODE_SET_PROTOTYPE_METHOD(tpl, "removeByPath", RemoveBypath); - NODE_SET_PROTOTYPE_METHOD(tpl, "find", Find); - NODE_SET_PROTOTYPE_METHOD(tpl, "conflictRemove", ConflictRemove); - NODE_SET_PROTOTYPE_METHOD(tpl, 
"conflictCleanup", ConflictCleanup); - NODE_SET_PROTOTYPE_METHOD(tpl, "hasConflicts", HasConflicts); - NODE_SET_METHOD(tpl, "indexToWorkdir", IndexToWorkdir); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Index"), constructor_template); -} - -Handle GitIndex::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_index is required."))); - } - - GitIndex* object = new GitIndex((git_index *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitIndex::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitIndex::constructor_template->NewInstance(1, argv)); -} - -git_index *GitIndex::GetValue() { - return this->raw; -} - - -#include "../include/functions/copy.h" - -/** - * @param {String} index_path - * @param {Index} callback - */ -Handle GitIndex::Open(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String index_path is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - OpenBaton* baton = new OpenBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->index_pathReference = Persistent::New(args[0]); - const char * from_index_path; - String::Utf8Value index_path(args[0]->ToString()); - from_index_path = strdup(*index_path); - baton->index_path = from_index_path; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, OpenWork, (uv_after_work_cb)OpenAfterWork); - - return Undefined(); -} - -void GitIndex::OpenWork(uv_work_t *req) { - OpenBaton *baton = 
static_cast(req->data); - int result = git_index_open( - &baton->out, - baton->index_path - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitIndex::OpenAfterWork(uv_work_t *req) { - HandleScope scope; - OpenBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitIndex::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->index_pathReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->index_path); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - */ -Handle GitIndex::Read(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - ReadBaton* baton = new ReadBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->indexReference = Persistent::New(args.This()); - baton->index = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, ReadWork, (uv_after_work_cb)ReadAfterWork); - - return Undefined(); -} - -void GitIndex::ReadWork(uv_work_t *req) { - ReadBaton 
*baton = static_cast(req->data); - int result = git_index_read( - baton->index - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitIndex::ReadAfterWork(uv_work_t *req) { - HandleScope scope; - ReadBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->indexReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - */ -Handle GitIndex::Write(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - WriteBaton* baton = new WriteBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->indexReference = Persistent::New(args.This()); - baton->index = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, WriteWork, (uv_after_work_cb)WriteAfterWork); - - return Undefined(); -} - -void GitIndex::WriteWork(uv_work_t *req) { - WriteBaton *baton = static_cast(req->data); - int result = git_index_write( - baton->index - ); - baton->error_code = result; - if (result != GIT_OK && 
giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitIndex::WriteAfterWork(uv_work_t *req) { - HandleScope scope; - WriteBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->indexReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Tree} tree - */ -Handle GitIndex::ReadTree(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Tree tree is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - ReadTreeBaton* baton = new ReadTreeBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->indexReference = Persistent::New(args.This()); - baton->index = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->treeReference = Persistent::New(args[0]); - const git_tree * from_tree; - from_tree = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->tree = from_tree; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, ReadTreeWork, (uv_after_work_cb)ReadTreeAfterWork); - - return 
Undefined(); -} - -void GitIndex::ReadTreeWork(uv_work_t *req) { - ReadTreeBaton *baton = static_cast(req->data); - int result = git_index_read_tree( - baton->index, - baton->tree - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitIndex::ReadTreeAfterWork(uv_work_t *req) { - HandleScope scope; - ReadTreeBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->indexReference.Dispose(); - baton->treeReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Oid} callback - */ -Handle GitIndex::WriteTree(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - WriteTreeBaton* baton = new WriteTreeBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->out = (git_oid *)malloc(sizeof(git_oid )); - baton->indexReference = Persistent::New(args.This()); - baton->index = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, WriteTreeWork, 
(uv_after_work_cb)WriteTreeAfterWork); - - return Undefined(); -} - -void GitIndex::WriteTreeWork(uv_work_t *req) { - WriteTreeBaton *baton = static_cast(req->data); - int result = git_index_write_tree( - baton->out, - baton->index - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitIndex::WriteTreeAfterWork(uv_work_t *req) { - HandleScope scope; - WriteTreeBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitOid::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->out); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->indexReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -/** - * @return {Number} result - */ -Handle GitIndex::Size(const Arguments& args) { - HandleScope scope; - - - size_t result = git_index_entrycount( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Uint32::New(result); - return scope.Close(to); -} - -/** - */ -Handle GitIndex::Clear(const Arguments& args) { - HandleScope scope; - - - git_index_clear( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - return Undefined(); -} - -/** - * @param {Number} n - * @return {IndexEntry} result - */ -Handle GitIndex::Entry(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || 
!args[0]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number n is required."))); - } - - size_t from_n; - from_n = (size_t) args[0]->ToUint32()->Value(); - - const git_index_entry * result = git_index_get_byindex( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_n - ); - - Handle to; - if (result != NULL) { - result = (const git_index_entry * )git_index_entry_dup(result); - } - if (result != NULL) { - to = GitIndexEntry::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {String} path - * @param {Number} stage - */ -Handle GitIndex::Remove(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - if (args.Length() == 1 || !args[1]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number stage is required."))); - } - - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - int from_stage; - from_stage = (int) args[1]->ToInt32()->Value(); - - int result = git_index_remove( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_path - , from_stage - ); - free((void *)from_path); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {String} dir - * @param {Number} stage - */ -Handle GitIndex::RemoveDirectory(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String dir is required."))); - } - if (args.Length() == 1 || !args[1]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number stage is required."))); - } - - const char * from_dir; - String::Utf8Value 
dir(args[0]->ToString()); - from_dir = strdup(*dir); - int from_stage; - from_stage = (int) args[1]->ToInt32()->Value(); - - int result = git_index_remove_directory( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_dir - , from_stage - ); - free((void *)from_dir); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -#include "../include/functions/copy.h" - -/** - * @param {String} path - */ -Handle GitIndex::AddBypath(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - AddBypathBaton* baton = new AddBypathBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->indexReference = Persistent::New(args.This()); - baton->index = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->pathReference = Persistent::New(args[0]); - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - baton->path = from_path; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, AddBypathWork, (uv_after_work_cb)AddBypathAfterWork); - - return Undefined(); -} - -void GitIndex::AddBypathWork(uv_work_t *req) { - AddBypathBaton *baton = static_cast(req->data); - int result = git_index_add_bypath( - baton->index, - baton->path - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitIndex::AddBypathAfterWork(uv_work_t *req) { - HandleScope scope; - AddBypathBaton 
*baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->indexReference.Dispose(); - baton->pathReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->path); - delete baton; -} - -/** - * @param {String} path - */ -Handle GitIndex::RemoveBypath(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - - int result = git_index_remove_bypath( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_path - ); - free((void *)from_path); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {String} path - * @return {Number} at_pos - */ -Handle GitIndex::Find(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - - size_t at_pos = 0; - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - - int result = 
git_index_find( - &at_pos - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_path - ); - free((void *)from_path); - - Handle to; - to = Uint32::New(at_pos); - return scope.Close(to); -} - -/** - * @param {String} path - */ -Handle GitIndex::ConflictRemove(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - - int result = git_index_conflict_remove( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_path - ); - free((void *)from_path); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - */ -Handle GitIndex::ConflictCleanup(const Arguments& args) { - HandleScope scope; - - - git_index_conflict_cleanup( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - return Undefined(); -} - -/** - * @return {Number} result - */ -Handle GitIndex::HasConflicts(const Arguments& args) { - HandleScope scope; - - - int result = git_index_has_conflicts( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Int32::New(result); - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {Repository} repo - * @param {Index} index - * @param {DiffOptions} opts - * @param {DiffList} callback - */ -Handle GitIndex::IndexToWorkdir(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - - if (args.Length() == 3 || !args[3]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - 
IndexToWorkdirBaton* baton = new IndexToWorkdirBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args[0]); - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->repo = from_repo; - baton->indexReference = Persistent::New(args[1]); - git_index * from_index; - if (args[1]->IsObject()) { - from_index = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - } else { - from_index = 0; - } - baton->index = from_index; - baton->optsReference = Persistent::New(args[2]); - const git_diff_options * from_opts; - if (args[2]->IsObject()) { - from_opts = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - } else { - from_opts = 0; - } - baton->opts = from_opts; - baton->callback = Persistent::New(Local::Cast(args[3])); - - uv_queue_work(uv_default_loop(), &baton->request, IndexToWorkdirWork, (uv_after_work_cb)IndexToWorkdirAfterWork); - - return Undefined(); -} - -void GitIndex::IndexToWorkdirWork(uv_work_t *req) { - IndexToWorkdirBaton *baton = static_cast(req->data); - int result = git_diff_index_to_workdir( - &baton->diff, - baton->repo, - baton->index, - baton->opts - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitIndex::IndexToWorkdirAfterWork(uv_work_t *req) { - HandleScope scope; - IndexToWorkdirBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->diff != NULL) { - to = GitDiffList::New((void *)baton->diff); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if 
(baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->indexReference.Dispose(); - baton->optsReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -Persistent GitIndex::constructor_template; diff --git a/src/index_entry.cc b/src/index_entry.cc deleted file mode 100644 index 933ffafda..000000000 --- a/src/index_entry.cc +++ /dev/null @@ -1,230 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/index_entry.h" -#include "../include/index_time.h" -#include "../include/oid.h" - -using namespace v8; -using namespace node; - -GitIndexEntry::GitIndexEntry(git_index_entry *raw) { - this->raw = raw; -} - -GitIndexEntry::~GitIndexEntry() { - free(this->raw); -} - -void GitIndexEntry::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("IndexEntry")); - - - NODE_SET_PROTOTYPE_METHOD(tpl, "ctime", Ctime); - NODE_SET_PROTOTYPE_METHOD(tpl, "mtime", Mtime); - NODE_SET_PROTOTYPE_METHOD(tpl, "dev", Dev); - NODE_SET_PROTOTYPE_METHOD(tpl, "ino", Ino); - NODE_SET_PROTOTYPE_METHOD(tpl, "mode", Mode); - NODE_SET_PROTOTYPE_METHOD(tpl, "uid", Uid); - NODE_SET_PROTOTYPE_METHOD(tpl, "gid", gid); - NODE_SET_PROTOTYPE_METHOD(tpl, "file_size", FileSize); - NODE_SET_PROTOTYPE_METHOD(tpl, "oid", Oid); - NODE_SET_PROTOTYPE_METHOD(tpl, "flags", Flags); - NODE_SET_PROTOTYPE_METHOD(tpl, "flags_extended", FlagsExtended); - NODE_SET_PROTOTYPE_METHOD(tpl, "path", Path); - - constructor_template = Persistent::New(tpl->GetFunction()); - 
target->Set(String::NewSymbol("IndexEntry"), constructor_template); -} - -Handle GitIndexEntry::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_index_entry is required."))); - } - - GitIndexEntry* object = new GitIndexEntry((git_index_entry *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitIndexEntry::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitIndexEntry::constructor_template->NewInstance(1, argv)); -} - -git_index_entry *GitIndexEntry::GetValue() { - return this->raw; -} - - -Handle GitIndexEntry::Ctime(const Arguments& args) { - HandleScope scope; - Handle to; - - git_index_time *ctime = - &ObjectWrap::Unwrap(args.This())->GetValue()->ctime; - - if (ctime != NULL) { - ctime = (git_index_time *)git_index_time_dup(ctime); - } - if (ctime != NULL) { - to = GitIndexTime::New((void *)ctime); - } else { - to = Null(); - } - return scope.Close(to); -} - -Handle GitIndexEntry::Mtime(const Arguments& args) { - HandleScope scope; - Handle to; - - git_index_time *mtime = - &ObjectWrap::Unwrap(args.This())->GetValue()->mtime; - - if (mtime != NULL) { - mtime = (git_index_time *)git_index_time_dup(mtime); - } - if (mtime != NULL) { - to = GitIndexTime::New((void *)mtime); - } else { - to = Null(); - } - return scope.Close(to); -} - -Handle GitIndexEntry::Dev(const Arguments& args) { - HandleScope scope; - Handle to; - - unsigned int dev = - ObjectWrap::Unwrap(args.This())->GetValue()->dev; - - to = Uint32::New(dev); - return scope.Close(to); -} - -Handle GitIndexEntry::Ino(const Arguments& args) { - HandleScope scope; - Handle to; - - unsigned int ino = - ObjectWrap::Unwrap(args.This())->GetValue()->ino; - - to = Uint32::New(ino); - return scope.Close(to); -} - -Handle GitIndexEntry::Mode(const Arguments& args) { - HandleScope 
scope; - Handle to; - - uint16_t mode = - ObjectWrap::Unwrap(args.This())->GetValue()->mode; - - to = Integer::New(mode); - return scope.Close(to); -} - -Handle GitIndexEntry::Uid(const Arguments& args) { - HandleScope scope; - Handle to; - - unsigned int uid = - ObjectWrap::Unwrap(args.This())->GetValue()->uid; - - to = Uint32::New(uid); - return scope.Close(to); -} - -Handle GitIndexEntry::gid(const Arguments& args) { - HandleScope scope; - Handle to; - - unsigned int gid = - ObjectWrap::Unwrap(args.This())->GetValue()->gid; - - to = Uint32::New(gid); - return scope.Close(to); -} - -Handle GitIndexEntry::FileSize(const Arguments& args) { - HandleScope scope; - Handle to; - - unsigned int file_size = - ObjectWrap::Unwrap(args.This())->GetValue()->file_size; - - to = Uint32::New(file_size); - return scope.Close(to); -} - -Handle GitIndexEntry::Oid(const Arguments& args) { - HandleScope scope; - Handle to; - - git_oid *oid = - &ObjectWrap::Unwrap(args.This())->GetValue()->oid; - - if (oid != NULL) { - oid = (git_oid *)git_oid_dup(oid); - } - if (oid != NULL) { - to = GitOid::New((void *)oid); - } else { - to = Null(); - } - return scope.Close(to); -} - -Handle GitIndexEntry::Flags(const Arguments& args) { - HandleScope scope; - Handle to; - - uint16_t flags = - ObjectWrap::Unwrap(args.This())->GetValue()->flags; - - to = Integer::New(flags); - return scope.Close(to); -} - -Handle GitIndexEntry::FlagsExtended(const Arguments& args) { - HandleScope scope; - Handle to; - - uint16_t flags_extended = - ObjectWrap::Unwrap(args.This())->GetValue()->flags_extended; - - to = Integer::New(flags_extended); - return scope.Close(to); -} - -Handle GitIndexEntry::Path(const Arguments& args) { - HandleScope scope; - Handle to; - - char * path = - ObjectWrap::Unwrap(args.This())->GetValue()->path; - - to = String::New(path); - return scope.Close(to); -} - -Persistent GitIndexEntry::constructor_template; diff --git a/src/index_time.cc b/src/index_time.cc deleted file mode 100644 
index 5501bd61a..000000000 --- a/src/index_time.cc +++ /dev/null @@ -1,87 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/index_time.h" - -using namespace v8; -using namespace node; - -GitIndexTime::GitIndexTime(git_index_time *raw) { - this->raw = raw; -} - -GitIndexTime::~GitIndexTime() { - free(this->raw); -} - -void GitIndexTime::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("IndexTime")); - - - NODE_SET_PROTOTYPE_METHOD(tpl, "seconds", Seconds); - NODE_SET_PROTOTYPE_METHOD(tpl, "nanoseconds", Nanoseconds); - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("IndexTime"), constructor_template); -} - -Handle GitIndexTime::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_index_time is required."))); - } - - GitIndexTime* object = new GitIndexTime((git_index_time *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitIndexTime::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitIndexTime::constructor_template->NewInstance(1, argv)); -} - -git_index_time *GitIndexTime::GetValue() { - return this->raw; -} - - -Handle GitIndexTime::Seconds(const Arguments& args) { - HandleScope scope; - Handle to; - - git_time_t seconds = - ObjectWrap::Unwrap(args.This())->GetValue()->seconds; - - to = Uint32::New(seconds); - return scope.Close(to); -} - -Handle GitIndexTime::Nanoseconds(const Arguments& args) { - HandleScope scope; - Handle to; - - unsigned int nanoseconds = - 
ObjectWrap::Unwrap(args.This())->GetValue()->nanoseconds; - - to = Uint32::New(nanoseconds); - return scope.Close(to); -} - -Persistent GitIndexTime::constructor_template; diff --git a/src/object.cc b/src/object.cc deleted file mode 100644 index be1895226..000000000 --- a/src/object.cc +++ /dev/null @@ -1,194 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/object.h" -#include "../include/oid.h" -#include "../include/repo.h" - -using namespace v8; -using namespace node; - -GitObject::GitObject(git_object *raw) { - this->raw = raw; -} - -GitObject::~GitObject() { - git_object_free(this->raw); -} - -void GitObject::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Object")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "oid", Oid); - NODE_SET_PROTOTYPE_METHOD(tpl, "type", Type); - NODE_SET_PROTOTYPE_METHOD(tpl, "peel", Peel); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Object"), constructor_template); -} - -Handle GitObject::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_object is required."))); - } - - GitObject* object = new GitObject((git_object *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitObject::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitObject::constructor_template->NewInstance(1, argv)); -} - -git_object *GitObject::GetValue() { - return this->raw; -} - - -/** - * @return {Oid} result - */ -Handle GitObject::Oid(const Arguments& args) { - HandleScope scope; - - - 
const git_oid * result = git_object_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitObject::Type(const Arguments& args) { - HandleScope scope; - - - git_otype result = git_object_type( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Number::New(result); - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {Number} target_type - * @param {Object} callback - */ -Handle GitObject::Peel(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number target_type is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - PeelBaton* baton = new PeelBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->objectReference = Persistent::New(args.This()); - baton->object = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->target_typeReference = Persistent::New(args[0]); - git_otype from_target_type; - from_target_type = (git_otype) args[0]->ToInt32()->Value(); - baton->target_type = from_target_type; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, PeelWork, (uv_after_work_cb)PeelAfterWork); - - return Undefined(); -} - -void GitObject::PeelWork(uv_work_t *req) { - PeelBaton *baton = static_cast(req->data); - int result = git_object_peel( - &baton->peeled, - baton->object, - baton->target_type - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = 
git_error_dup(giterr_last()); - } -} - -void GitObject::PeelAfterWork(uv_work_t *req) { - HandleScope scope; - PeelBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->peeled != NULL) { - to = GitObject::New((void *)baton->peeled); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->objectReference.Dispose(); - baton->target_typeReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -Persistent GitObject::constructor_template; diff --git a/src/odb.cc b/src/odb.cc deleted file mode 100644 index 0ed9b605f..000000000 --- a/src/odb.cc +++ /dev/null @@ -1,607 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/odb.h" -#include "../include/oid.h" -#include "../include/odb_object.h" -#include "node_buffer.h" - -using namespace v8; -using namespace node; - -GitOdb::GitOdb(git_odb *raw) { - this->raw = raw; -} - -GitOdb::~GitOdb() { - git_odb_free(this->raw); -} - -void GitOdb::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Odb")); - - NODE_SET_METHOD(tpl, "create()", Create); - NODE_SET_METHOD(tpl, "open", Open); - NODE_SET_PROTOTYPE_METHOD(tpl, "addDiskAlternate", AddDiskAlternate); - NODE_SET_PROTOTYPE_METHOD(tpl, "read", Read); - NODE_SET_METHOD(tpl, "readPrefix", ReadPrefix); - NODE_SET_METHOD(tpl, "readHeader", ReadHeader); - NODE_SET_PROTOTYPE_METHOD(tpl, "exists", Exists); - NODE_SET_PROTOTYPE_METHOD(tpl, "refresh", Refresh); - NODE_SET_PROTOTYPE_METHOD(tpl, "write", Write); - NODE_SET_METHOD(tpl, "hash", Hash); - NODE_SET_METHOD(tpl, "hashfile", Hashfile); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Odb"), constructor_template); -} - -Handle GitOdb::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_odb is required."))); - } - - GitOdb* object = new GitOdb((git_odb *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitOdb::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitOdb::constructor_template->NewInstance(1, argv)); -} - -git_odb *GitOdb::GetValue() { - return this->raw; -} - - -/** - * @return {Odb} out - */ -Handle GitOdb::Create(const Arguments& args) { - HandleScope scope; - - git_odb * out = 0; - - int result = 
git_odb_new( - &out - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitOdb::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {String} objects_dir - * @return {Odb} out - */ -Handle GitOdb::Open(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String objects_dir is required."))); - } - - git_odb * out = 0; - const char * from_objects_dir; - String::Utf8Value objects_dir(args[0]->ToString()); - from_objects_dir = strdup(*objects_dir); - - int result = git_odb_open( - &out - , from_objects_dir - ); - free((void *)from_objects_dir); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitOdb::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {String} path - */ -Handle GitOdb::AddDiskAlternate(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - - int result = git_odb_add_disk_alternate( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_path - ); - free((void *)from_path); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - 
-#include "../include/functions/copy.h" - -/** - * @param {Oid} id - * @param {OdbObject} callback - */ -Handle GitOdb::Read(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - ReadBaton* baton = new ReadBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->dbReference = Persistent::New(args.This()); - baton->db = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->idReference = Persistent::New(args[0]); - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->id = from_id; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, ReadWork, (uv_after_work_cb)ReadAfterWork); - - return Undefined(); -} - -void GitOdb::ReadWork(uv_work_t *req) { - ReadBaton *baton = static_cast(req->data); - int result = git_odb_read( - &baton->out, - baton->db, - baton->id - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitOdb::ReadAfterWork(uv_work_t *req) { - HandleScope scope; - ReadBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitOdbObject::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - 
free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->dbReference.Dispose(); - baton->idReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -/** - * @param {Odb} db - * @param {Oid} short_id - * @param {Number} len - * @return {OdbObject} out - */ -Handle GitOdb::ReadPrefix(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Odb db is required."))); - } - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid short_id is required."))); - } - if (args.Length() == 2 || !args[2]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number len is required."))); - } - - git_odb_object * out = 0; - git_odb * from_db; - from_db = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - const git_oid * from_short_id; - from_short_id = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - size_t from_len; - from_len = (size_t) args[2]->ToUint32()->Value(); - - int result = git_odb_read_prefix( - &out - , from_db - , from_short_id - , from_len - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitOdbObject::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {Odb} db - * @param {Oid} id - * @return {Number} len_out - * @return {Number} type_out - */ -Handle GitOdb::ReadHeader(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Odb db is required."))); - } - if 
(args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - - size_t len_out = 0; - git_otype type_out = GIT_OBJ_ANY; - git_odb * from_db; - from_db = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - - int result = git_odb_read_header( - &len_out - , &type_out - , from_db - , from_id - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle toReturn = Object::New(); - Handle to; - to = Uint32::New(len_out); - toReturn->Set(String::NewSymbol("len_out"), to); - - to = Int32::New(type_out); - toReturn->Set(String::NewSymbol("type_out"), to); - - return scope.Close(toReturn); -} - -/** - * @param {Oid} id - */ -Handle GitOdb::Exists(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - int result = git_odb_exists( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_id - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - */ -Handle GitOdb::Refresh(const Arguments& args) { - HandleScope scope; - - - int result = git_odb_refresh( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - 
return Undefined(); -} - -#include "../include/functions/copy.h" - -/** - * @param {String} data - * @param {Number} len - * @param {Number} type - * @param {Oid} callback - */ -Handle GitOdb::Write(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String data is required."))); - } - if (args.Length() == 1 || !args[1]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number len is required."))); - } - if (args.Length() == 2 || !args[2]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number type is required."))); - } - - if (args.Length() == 3 || !args[3]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - WriteBaton* baton = new WriteBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->out = (git_oid *)malloc(sizeof(git_oid )); - baton->odbReference = Persistent::New(args.This()); - baton->odb = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->dataReference = Persistent::New(args[0]); - const void * from_data; - String::Utf8Value data(args[0]->ToString()); - from_data = strdup(*data); - baton->data = from_data; - baton->lenReference = Persistent::New(args[1]); - size_t from_len; - from_len = (size_t) args[1]->ToUint32()->Value(); - baton->len = from_len; - baton->typeReference = Persistent::New(args[2]); - git_otype from_type; - from_type = (git_otype) args[2]->ToInt32()->Value(); - baton->type = from_type; - baton->callback = Persistent::New(Local::Cast(args[3])); - - uv_queue_work(uv_default_loop(), &baton->request, WriteWork, (uv_after_work_cb)WriteAfterWork); - - return Undefined(); -} - -void GitOdb::WriteWork(uv_work_t *req) { - WriteBaton *baton = static_cast(req->data); - int result = git_odb_write( - baton->out, - baton->odb, - baton->data, - baton->len, - baton->type - ); - 
baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitOdb::WriteAfterWork(uv_work_t *req) { - HandleScope scope; - WriteBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitOid::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->out); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->odbReference.Dispose(); - baton->dataReference.Dispose(); - baton->lenReference.Dispose(); - baton->typeReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->data); - delete baton; -} - -/** - * @param {Buffer} data - * @param {Number} len - * @param {Number} type - * @return {Oid} out - */ -Handle GitOdb::Hash(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Buffer data is required."))); - } - if (args.Length() == 1 || !args[1]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number len is required."))); - } - if (args.Length() == 2 || !args[2]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number type is required."))); - } - - git_oid *out = (git_oid *)malloc(sizeof(git_oid)); - const void * from_data; - from_data = Buffer::Data(args[0]->ToObject()); - size_t from_len; - from_len = (size_t) 
args[1]->ToUint32()->Value(); - git_otype from_type; - from_type = (git_otype) args[2]->ToInt32()->Value(); - - int result = git_odb_hash( - out - , from_data - , from_len - , from_type - ); - if (result != GIT_OK) { - free(out); - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitOid::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {String} path - * @param {Number} type - * @return {Oid} out - */ -Handle GitOdb::Hashfile(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - if (args.Length() == 1 || !args[1]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number type is required."))); - } - - git_oid *out = (git_oid *)malloc(sizeof(git_oid)); - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - git_otype from_type; - from_type = (git_otype) args[1]->ToInt32()->Value(); - - int result = git_odb_hashfile( - out - , from_path - , from_type - ); - free((void *)from_path); - if (result != GIT_OK) { - free(out); - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitOid::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -Persistent GitOdb::constructor_template; diff --git a/src/odb_object.cc b/src/odb_object.cc deleted file mode 100644 index 507d63736..000000000 --- a/src/odb_object.cc +++ /dev/null @@ -1,144 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/odb_object.h" -#include "../include/wrapper.h" -#include "../include/oid.h" - -using namespace v8; -using namespace node; - -GitOdbObject::GitOdbObject(git_odb_object *raw) { - this->raw = raw; -} - -GitOdbObject::~GitOdbObject() { - git_odb_object_free(this->raw); -} - -void GitOdbObject::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("OdbObject")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "data", Data); - NODE_SET_PROTOTYPE_METHOD(tpl, "size", Size); - NODE_SET_PROTOTYPE_METHOD(tpl, "type", Type); - NODE_SET_PROTOTYPE_METHOD(tpl, "oid", Oid); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("OdbObject"), constructor_template); -} - -Handle GitOdbObject::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_odb_object is required."))); - } - - GitOdbObject* object = new GitOdbObject((git_odb_object *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitOdbObject::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitOdbObject::constructor_template->NewInstance(1, argv)); -} - -git_odb_object *GitOdbObject::GetValue() { - return this->raw; -} - - -/** - * @return {Wrapper} result - */ -Handle GitOdbObject::Data(const Arguments& args) { - HandleScope scope; - - - const void * result = git_odb_object_data( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - to = Wrapper::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle 
GitOdbObject::Size(const Arguments& args) { - HandleScope scope; - - - size_t result = git_odb_object_size( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Uint32::New(result); - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitOdbObject::Type(const Arguments& args) { - HandleScope scope; - - - git_otype result = git_odb_object_type( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Int32::New(result); - return scope.Close(to); -} - -/** - * @return {Oid} result - */ -Handle GitOdbObject::Oid(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_odb_object_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -Persistent GitOdbObject::constructor_template; diff --git a/src/oid.cc b/src/oid.cc deleted file mode 100755 index 0f354998f..000000000 --- a/src/oid.cc +++ /dev/null @@ -1,120 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/oid.h" - -using namespace v8; -using namespace node; - -GitOid::GitOid(git_oid *raw) { - this->raw = raw; -} - -GitOid::~GitOid() { - free(this->raw); -} - -void GitOid::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Oid")); - - NODE_SET_METHOD(tpl, "fromString", FromString); - NODE_SET_PROTOTYPE_METHOD(tpl, "sha", Sha); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Oid"), constructor_template); -} - -Handle GitOid::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_oid is required."))); - } - - GitOid* object = new GitOid((git_oid *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitOid::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitOid::constructor_template->NewInstance(1, argv)); -} - -git_oid *GitOid::GetValue() { - return this->raw; -} - - -/** - * @param {String} str - * @return {Oid} out - */ -Handle GitOid::FromString(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String str is required."))); - } - - git_oid *out = (git_oid *)malloc(sizeof(git_oid)); - const char * from_str; - String::Utf8Value str(args[0]->ToString()); - from_str = strdup(*str); - - int result = git_oid_fromstr( - out - , from_str - ); - free((void *)from_str); - if (result != GIT_OK) { - free(out); - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return 
ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitOid::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitOid::Sha(const Arguments& args) { - HandleScope scope; - - - char * result = git_oid_allocfmt( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - free(result); - return scope.Close(to); -} - -Persistent GitOid::constructor_template; diff --git a/src/patch.cc b/src/patch.cc deleted file mode 100644 index ce5e9d90f..000000000 --- a/src/patch.cc +++ /dev/null @@ -1,326 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/patch.h" -#include "../include/delta.h" -#include "../include/diff_range.h" - -using namespace v8; -using namespace node; - -GitPatch::GitPatch(git_diff_patch *raw) { - this->raw = raw; -} - -GitPatch::~GitPatch() { - git_diff_patch_free(this->raw); -} - -void GitPatch::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Patch")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "delta", Delta); - NODE_SET_PROTOTYPE_METHOD(tpl, "size", Size); - NODE_SET_PROTOTYPE_METHOD(tpl, "stats", Stats); - NODE_SET_PROTOTYPE_METHOD(tpl, "hunk", Hunk); - NODE_SET_PROTOTYPE_METHOD(tpl, "lines", Lines); - NODE_SET_PROTOTYPE_METHOD(tpl, "line", Line); - NODE_SET_METHOD(tpl, "toString", ToString); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Patch"), constructor_template); -} - -Handle GitPatch::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return 
ThrowException(Exception::Error(String::New("git_diff_patch is required."))); - } - - GitPatch* object = new GitPatch((git_diff_patch *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitPatch::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitPatch::constructor_template->NewInstance(1, argv)); -} - -git_diff_patch *GitPatch::GetValue() { - return this->raw; -} - - -/** - * @return {Delta} result - */ -Handle GitPatch::Delta(const Arguments& args) { - HandleScope scope; - - - const git_diff_delta * result = git_diff_patch_delta( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_diff_delta * )git_diff_delta_dup(result); - } - if (result != NULL) { - to = GitDelta::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitPatch::Size(const Arguments& args) { - HandleScope scope; - - - size_t result = git_diff_patch_num_hunks( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Uint32::New(result); - return scope.Close(to); -} - -/** - * @return {Number} total_context - * @return {Number} total_additions - * @return {Number} total_deletions - */ -Handle GitPatch::Stats(const Arguments& args) { - HandleScope scope; - - size_t total_context = 0; - size_t total_additions = 0; - size_t total_deletions = 0; - - int result = git_diff_patch_line_stats( - &total_context - , &total_additions - , &total_deletions - , ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle toReturn = Object::New(); - Handle to; - to = Integer::New(total_context); - 
toReturn->Set(String::NewSymbol("total_context"), to); - - to = Integer::New(total_additions); - toReturn->Set(String::NewSymbol("total_additions"), to); - - to = Integer::New(total_deletions); - toReturn->Set(String::NewSymbol("total_deletions"), to); - - return scope.Close(toReturn); -} - -/** - * @param {Number} hunk_idx - * @return {DiffRange} range - * @return {String} header - * @return {Number} header_len - * @return {Number} lines_in_hunk - */ -Handle GitPatch::Hunk(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number hunk_idx is required."))); - } - - const git_diff_range * range = 0; - const char * header = 0; - size_t header_len = 0; - size_t lines_in_hunk = 0; - size_t from_hunk_idx; - from_hunk_idx = (size_t) args[0]->ToUint32()->Value(); - - int result = git_diff_patch_get_hunk( - &range - , &header - , &header_len - , &lines_in_hunk - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_hunk_idx - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle toReturn = Object::New(); - Handle to; - if (range != NULL) { - range = (const git_diff_range * )git_diff_range_dup(range); - } - if (range != NULL) { - to = GitDiffRange::New((void *)range); - } else { - to = Null(); - } - toReturn->Set(String::NewSymbol("range"), to); - - to = String::New(header); - toReturn->Set(String::NewSymbol("header"), to); - - to = Uint32::New(header_len); - toReturn->Set(String::NewSymbol("headerLength"), to); - - to = Uint32::New(lines_in_hunk); - toReturn->Set(String::NewSymbol("lines"), to); - - return scope.Close(toReturn); -} - -/** - * @param {Number} hunk_idx - * @return {Number} result - */ -Handle GitPatch::Lines(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || 
!args[0]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number hunk_idx is required."))); - } - - size_t from_hunk_idx; - from_hunk_idx = (size_t) args[0]->ToUint32()->Value(); - - int result = git_diff_patch_num_lines_in_hunk( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_hunk_idx - ); - - Handle to; - to = Int32::New(result); - return scope.Close(to); -} - -/** - * @param {Number} hunk_idx - * @param {Number} line_of_hunk - * @return {Number} line_origin - * @return {String} content - * @return {Number} content_len - * @return {Number} old_lineno - * @return {Number} new_lineno - */ -Handle GitPatch::Line(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number hunk_idx is required."))); - } - if (args.Length() == 1 || !args[1]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number line_of_hunk is required."))); - } - - char line_origin = 0; - const char * content = 0; - size_t content_len = 0; - int old_lineno = 0; - int new_lineno = 0; - size_t from_hunk_idx; - from_hunk_idx = (size_t) args[0]->ToUint32()->Value(); - size_t from_line_of_hunk; - from_line_of_hunk = (size_t) args[1]->ToUint32()->Value(); - - int result = git_diff_patch_get_line_in_hunk( - &line_origin - , &content - , &content_len - , &old_lineno - , &new_lineno - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_hunk_idx - , from_line_of_hunk - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle toReturn = Object::New(); - Handle to; - to = Integer::New(line_origin); - toReturn->Set(String::NewSymbol("lineOrigin"), to); - - to = String::New(content, content_len); - toReturn->Set(String::NewSymbol("content"), to); - - to = Uint32::New(content_len); - 
toReturn->Set(String::NewSymbol("length"), to); - - to = Int32::New(old_lineno); - toReturn->Set(String::NewSymbol("oldLineNumber"), to); - - to = Int32::New(new_lineno); - toReturn->Set(String::NewSymbol("newLineNumber"), to); - - return scope.Close(toReturn); -} - -/** - * @return {String} string - */ -Handle GitPatch::ToString(const Arguments& args) { - HandleScope scope; - - char * string = 0; - - int result = git_diff_patch_to_str( - &string - , ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - to = String::New(string); - free(string); - return scope.Close(to); -} - -Persistent GitPatch::constructor_template; diff --git a/src/refdb.cc b/src/refdb.cc deleted file mode 100644 index d1b344d76..000000000 --- a/src/refdb.cc +++ /dev/null @@ -1,63 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/refdb.h" - -using namespace v8; -using namespace node; - -GitRefDb::GitRefDb(git_refdb *raw) { - this->raw = raw; -} - -GitRefDb::~GitRefDb() { - free(this->raw); -} - -void GitRefDb::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("RefDb")); - - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("RefDb"), constructor_template); -} - -Handle GitRefDb::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_refdb is required."))); - } - - GitRefDb* object = new GitRefDb((git_refdb *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitRefDb::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitRefDb::constructor_template->NewInstance(1, argv)); -} - -git_refdb *GitRefDb::GetValue() { - return this->raw; -} - - -Persistent GitRefDb::constructor_template; diff --git a/src/reference.cc b/src/reference.cc deleted file mode 100755 index 53a666230..000000000 --- a/src/reference.cc +++ /dev/null @@ -1,671 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/reference.h" -#include "../include/repo.h" -#include "../include/oid.h" -#include "../include/object.h" - -using namespace v8; -using namespace node; - -GitReference::GitReference(git_reference *raw) { - this->raw = raw; -} - -GitReference::~GitReference() { - git_reference_free(this->raw); -} - -void GitReference::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Reference")); - - NODE_SET_METHOD(tpl, "oidForName", OidForName); - NODE_SET_PROTOTYPE_METHOD(tpl, "target", Target); - NODE_SET_PROTOTYPE_METHOD(tpl, "symbolicTarget", SymbolicTarget); - NODE_SET_PROTOTYPE_METHOD(tpl, "type", Type); - NODE_SET_PROTOTYPE_METHOD(tpl, "name", Name); - NODE_SET_PROTOTYPE_METHOD(tpl, "resolve", Resolve); - NODE_SET_PROTOTYPE_METHOD(tpl, "setSymbolicTarget", SetSymbolicTarget); - NODE_SET_PROTOTYPE_METHOD(tpl, "setTarget", setTarget); - NODE_SET_PROTOTYPE_METHOD(tpl, "rename", Rename); - NODE_SET_PROTOTYPE_METHOD(tpl, "delete", Delete); - NODE_SET_PROTOTYPE_METHOD(tpl, "isBranch", IsBranch); - NODE_SET_PROTOTYPE_METHOD(tpl, "isRemote", IsRemote); - NODE_SET_PROTOTYPE_METHOD(tpl, "peel", Peel); - NODE_SET_METHOD(tpl, "isValidName", IsValidName); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Reference"), constructor_template); -} - -Handle GitReference::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_reference is required."))); - } - - GitReference* object = new GitReference((git_reference *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitReference::New(void *raw) { - HandleScope scope; - Handle 
argv[1] = { External::New((void *)raw) }; - return scope.Close(GitReference::constructor_template->NewInstance(1, argv)); -} - -git_reference *GitReference::GetValue() { - return this->raw; -} - - -#include "../include/functions/copy.h" - -/** - * @param {Repository} repo - * @param {String} name - * @param {Oid} callback - */ -Handle GitReference::OidForName(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 1 || !args[1]->IsString()) { - return ThrowException(Exception::Error(String::New("String name is required."))); - } - - if (args.Length() == 2 || !args[2]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - OidForNameBaton* baton = new OidForNameBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->out = (git_oid *)malloc(sizeof(git_oid )); - baton->repoReference = Persistent::New(args[0]); - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->repo = from_repo; - baton->nameReference = Persistent::New(args[1]); - const char * from_name; - String::Utf8Value name(args[1]->ToString()); - from_name = strdup(*name); - baton->name = from_name; - baton->callback = Persistent::New(Local::Cast(args[2])); - - uv_queue_work(uv_default_loop(), &baton->request, OidForNameWork, (uv_after_work_cb)OidForNameAfterWork); - - return Undefined(); -} - -void GitReference::OidForNameWork(uv_work_t *req) { - OidForNameBaton *baton = static_cast(req->data); - int result = git_reference_name_to_id( - baton->out, - baton->repo, - baton->name - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitReference::OidForNameAfterWork(uv_work_t *req) { - HandleScope 
scope; - OidForNameBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitOid::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->out); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->nameReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->name); - delete baton; -} - -/** - * @return {Oid} result - */ -Handle GitReference::Target(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_reference_target( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitReference::SymbolicTarget(const Arguments& args) { - HandleScope scope; - - - const char * result = git_reference_symbolic_target( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitReference::Type(const Arguments& args) { - HandleScope scope; - - - git_ref_t result = git_reference_type( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Number::New(result); - return scope.Close(to); -} - -/** 
- * @return {String} result - */ -Handle GitReference::Name(const Arguments& args) { - HandleScope scope; - - - const char * result = git_reference_name( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {Reference} callback - */ -Handle GitReference::Resolve(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - ResolveBaton* baton = new ResolveBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->refReference = Persistent::New(args.This()); - baton->ref = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, ResolveWork, (uv_after_work_cb)ResolveAfterWork); - - return Undefined(); -} - -void GitReference::ResolveWork(uv_work_t *req) { - ResolveBaton *baton = static_cast(req->data); - int result = git_reference_resolve( - &baton->out, - baton->ref - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitReference::ResolveAfterWork(uv_work_t *req) { - HandleScope scope; - ResolveBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitReference::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if 
(baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->refReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -/** - * @param {String} target - * @return {Reference} out - */ -Handle GitReference::SetSymbolicTarget(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String target is required."))); - } - - git_reference * out = 0; - const char * from_target; - String::Utf8Value target(args[0]->ToString()); - from_target = strdup(*target); - - int result = git_reference_symbolic_set_target( - &out - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_target - ); - free((void *)from_target); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitReference::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {Oid} id - * @return {Reference} out - */ -Handle GitReference::setTarget(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - - git_reference * out = 0; - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - int result = git_reference_set_target( - &out - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_id - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - 
} - } - - Handle to; - if (out != NULL) { - to = GitReference::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {String} new_name - * @param {Number} force - * @param {Reference} callback - */ -Handle GitReference::Rename(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String new_name is required."))); - } - if (args.Length() == 1 || !args[1]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number force is required."))); - } - - if (args.Length() == 2 || !args[2]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - RenameBaton* baton = new RenameBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->refReference = Persistent::New(args.This()); - baton->ref = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->new_nameReference = Persistent::New(args[0]); - const char * from_new_name; - String::Utf8Value new_name(args[0]->ToString()); - from_new_name = strdup(*new_name); - baton->new_name = from_new_name; - baton->forceReference = Persistent::New(args[1]); - int from_force; - from_force = (int) args[1]->ToInt32()->Value(); - baton->force = from_force; - baton->callback = Persistent::New(Local::Cast(args[2])); - - uv_queue_work(uv_default_loop(), &baton->request, RenameWork, (uv_after_work_cb)RenameAfterWork); - - return Undefined(); -} - -void GitReference::RenameWork(uv_work_t *req) { - RenameBaton *baton = static_cast(req->data); - int result = git_reference_rename( - &baton->out, - baton->ref, - baton->new_name, - baton->force - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitReference::RenameAfterWork(uv_work_t *req) { - HandleScope 
scope; - RenameBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitReference::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->refReference.Dispose(); - baton->new_nameReference.Dispose(); - baton->forceReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->new_name); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - */ -Handle GitReference::Delete(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - DeleteBaton* baton = new DeleteBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->refReference = Persistent::New(args.This()); - baton->ref = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, DeleteWork, (uv_after_work_cb)DeleteAfterWork); - - return Undefined(); -} - -void GitReference::DeleteWork(uv_work_t *req) { - DeleteBaton *baton = static_cast(req->data); - int result = git_reference_delete( - baton->ref - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = 
git_error_dup(giterr_last()); - } -} - -void GitReference::DeleteAfterWork(uv_work_t *req) { - HandleScope scope; - DeleteBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->refReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -/** - */ -Handle GitReference::IsBranch(const Arguments& args) { - HandleScope scope; - - - int result = git_reference_is_branch( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - */ -Handle GitReference::IsRemote(const Arguments& args) { - HandleScope scope; - - - int result = git_reference_is_remote( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {Number} type - * @return {Object} out - */ -Handle GitReference::Peel(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsInt32()) { - return 
ThrowException(Exception::Error(String::New("Number type is required."))); - } - - git_object * out = 0; - git_otype from_type; - from_type = (git_otype) args[0]->ToInt32()->Value(); - - int result = git_reference_peel( - &out - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_type - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitObject::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {String} refname - */ -Handle GitReference::IsValidName(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String refname is required."))); - } - - const char * from_refname; - String::Utf8Value refname(args[0]->ToString()); - from_refname = strdup(*refname); - - int result = git_reference_is_valid_name( - from_refname - ); - free((void *)from_refname); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -Persistent GitReference::constructor_template; diff --git a/src/remote.cc b/src/remote.cc deleted file mode 100644 index c0fd95a6b..000000000 --- a/src/remote.cc +++ /dev/null @@ -1,612 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/remote.h" -#include "git2/net.h" - -using namespace v8; -using namespace node; - -GitRemote::GitRemote(git_remote *raw) { - this->raw = raw; -} - -GitRemote::~GitRemote() { - git_remote_free(this->raw); -} - -void GitRemote::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Remote")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "name", Name); - NODE_SET_PROTOTYPE_METHOD(tpl, "url", Url); - NODE_SET_PROTOTYPE_METHOD(tpl, "pushUrl", PushUrl); - NODE_SET_PROTOTYPE_METHOD(tpl, "setUrl", SetUrl); - NODE_SET_PROTOTYPE_METHOD(tpl, "setPushUrl", SetPushUrl); - NODE_SET_PROTOTYPE_METHOD(tpl, "connect", Connect); - NODE_SET_PROTOTYPE_METHOD(tpl, "download", Download); - NODE_SET_PROTOTYPE_METHOD(tpl, "connected", Connected); - NODE_SET_PROTOTYPE_METHOD(tpl, "stop", Stop); - NODE_SET_PROTOTYPE_METHOD(tpl, "disconnect", Disconnect); - NODE_SET_PROTOTYPE_METHOD(tpl, "updateTips", UpdateTips); - NODE_SET_METHOD(tpl, "validUrl", ValidUrl); - NODE_SET_METHOD(tpl, "supportedUrl", SupportedUrl); - NODE_SET_PROTOTYPE_METHOD(tpl, "checkCert", CheckCert); - NODE_SET_PROTOTYPE_METHOD(tpl, "updateFetchhead", UpdateFetchhead); - NODE_SET_PROTOTYPE_METHOD(tpl, "setUpdateFetchhead", SetUpdateFetchhead); - NODE_SET_METHOD(tpl, "isValidName", IsValidName); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Remote"), constructor_template); -} - -Handle GitRemote::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_remote is required."))); - } - - GitRemote* object = new GitRemote((git_remote *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} 
- -Handle GitRemote::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitRemote::constructor_template->NewInstance(1, argv)); -} - -git_remote *GitRemote::GetValue() { - return this->raw; -} - - -/** - * @return {String} result - */ -Handle GitRemote::Name(const Arguments& args) { - HandleScope scope; - - - const char * result = git_remote_name( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitRemote::Url(const Arguments& args) { - HandleScope scope; - - - const char * result = git_remote_url( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitRemote::PushUrl(const Arguments& args) { - HandleScope scope; - - - const char * result = git_remote_pushurl( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @param {String} url - */ -Handle GitRemote::SetUrl(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String url is required."))); - } - - const char* from_url; - String::Utf8Value url(args[0]->ToString()); - from_url = strdup(*url); - - int result = git_remote_set_url( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_url - ); - free((void *)from_url); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {String} url - */ -Handle GitRemote::SetPushUrl(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return 
ThrowException(Exception::Error(String::New("String url is required."))); - } - - const char* from_url; - String::Utf8Value url(args[0]->ToString()); - from_url = strdup(*url); - - int result = git_remote_set_pushurl( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_url - ); - free((void *)from_url); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -#include "../include/functions/copy.h" - -/** - * @param {Number} direction - */ -Handle GitRemote::Connect(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsNumber()) { - return ThrowException(Exception::Error(String::New("Number direction is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - ConnectBaton* baton = new ConnectBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->remoteReference = Persistent::New(args.This()); - baton->remote = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->directionReference = Persistent::New(args[0]); - git_direction from_direction; - from_direction = (git_direction) (int) args[0]->ToNumber()->Value(); - baton->direction = from_direction; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, ConnectWork, (uv_after_work_cb)ConnectAfterWork); - - return Undefined(); -} - -void GitRemote::ConnectWork(uv_work_t *req) { - ConnectBaton *baton = static_cast(req->data); - int result = git_remote_connect( - baton->remote, - baton->direction - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void 
GitRemote::ConnectAfterWork(uv_work_t *req) { - HandleScope scope; - ConnectBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->remoteReference.Dispose(); - baton->directionReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Function} progress_cb - * @param {void} payload - */ -Handle GitRemote::Download(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - DownloadBaton* baton = new DownloadBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->remoteReference = Persistent::New(args.This()); - baton->remote = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->progress_cbReference = Persistent::New(args[0]); - git_transfer_progress_callback from_progress_cb; - if (args[0]->IsFunction()) { - Persistent::New(Local::Cast(args[0])); - } else { - from_progress_cb = 0; - } - baton->progress_cb = from_progress_cb; - baton->payloadReference = Persistent::New(args[1]); - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, DownloadWork, (uv_after_work_cb)DownloadAfterWork); - - return 
Undefined(); -} - -void GitRemote::DownloadWork(uv_work_t *req) { - DownloadBaton *baton = static_cast(req->data); - int result = git_remote_download( - baton->remote, - baton->progress_cb, - baton->payload - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRemote::DownloadAfterWork(uv_work_t *req) { - HandleScope scope; - DownloadBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->remoteReference.Dispose(); - baton->progress_cbReference.Dispose(); - baton->payloadReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -/** - */ -Handle GitRemote::Connected(const Arguments& args) { - HandleScope scope; - - - int result = git_remote_connected( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - */ -Handle GitRemote::Stop(const Arguments& args) { - HandleScope scope; - - - git_remote_stop( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - return Undefined(); -} - -#include "../include/functions/copy.h" - -/** - */ -Handle GitRemote::Disconnect(const 
Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - DisconnectBaton* baton = new DisconnectBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->remoteReference = Persistent::New(args.This()); - baton->remote = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, DisconnectWork, (uv_after_work_cb)DisconnectAfterWork); - - return Undefined(); -} - -void GitRemote::DisconnectWork(uv_work_t *req) { - DisconnectBaton *baton = static_cast(req->data); - git_remote_disconnect( - baton->remote - ); -} - -void GitRemote::DisconnectAfterWork(uv_work_t *req) { - HandleScope scope; - DisconnectBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->remoteReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -/** - */ -Handle GitRemote::UpdateTips(const Arguments& args) { - HandleScope scope; - - - int result = git_remote_update_tips( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } 
else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {String} url - */ -Handle GitRemote::ValidUrl(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String url is required."))); - } - - const char * from_url; - String::Utf8Value url(args[0]->ToString()); - from_url = strdup(*url); - - int result = git_remote_valid_url( - from_url - ); - free((void *)from_url); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {String} url - */ -Handle GitRemote::SupportedUrl(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String url is required."))); - } - - const char* from_url; - String::Utf8Value url(args[0]->ToString()); - from_url = strdup(*url); - - int result = git_remote_supported_url( - from_url - ); - free((void *)from_url); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {Number} check - */ -Handle GitRemote::CheckCert(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number check is required."))); - } - - int from_check; - from_check = (int) args[0]->ToInt32()->Value(); - - git_remote_check_cert( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_check - ); - - return Undefined(); -} - -/** - */ -Handle GitRemote::UpdateFetchhead(const Arguments& args) { - 
HandleScope scope; - - - int result = git_remote_update_fetchhead( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @param {Number} value - */ -Handle GitRemote::SetUpdateFetchhead(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number value is required."))); - } - - int from_value; - from_value = (int) args[0]->ToInt32()->Value(); - - git_remote_set_update_fetchhead( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_value - ); - - return Undefined(); -} - -/** - * @param {String} remote_name - */ -Handle GitRemote::IsValidName(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String remote_name is required."))); - } - - const char * from_remote_name; - String::Utf8Value remote_name(args[0]->ToString()); - from_remote_name = strdup(*remote_name); - - int result = git_remote_is_valid_name( - from_remote_name - ); - free((void *)from_remote_name); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -Persistent GitRemote::constructor_template; diff --git a/src/repo.cc b/src/repo.cc deleted file mode 100755 index 637479573..000000000 --- a/src/repo.cc +++ /dev/null @@ -1,2366 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/repo.h" -#include "../include/oid.h" -#include "../include/commit.h" -#include "../include/blob.h" -#include "../include/object.h" -#include "../include/reference.h" -#include "../include/submodule.h" -#include "../include/refdb.h" -#include "../include/revwalk.h" -#include "../include/tag.h" -#include "../include/signature.h" -#include "../include/tree.h" -#include "../include/odb.h" -#include "../include/index.h" -#include "../include/remote.h" -#include "../include/clone_options.h" -#include "node_buffer.h" - -using namespace v8; -using namespace node; - -GitRepo::GitRepo(git_repository *raw) { - this->raw = raw; -} - -GitRepo::~GitRepo() { - git_repository_free(this->raw); -} - -void GitRepo::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Repo")); - - NODE_SET_METHOD(tpl, "open", Open); - NODE_SET_METHOD(tpl, "init", Init); - NODE_SET_PROTOTYPE_METHOD(tpl, "path", Path); - NODE_SET_PROTOTYPE_METHOD(tpl, "workdir", Workdir); - NODE_SET_PROTOTYPE_METHOD(tpl, "odb", Odb); - NODE_SET_PROTOTYPE_METHOD(tpl, "openIndex", openIndex); - NODE_SET_PROTOTYPE_METHOD(tpl, "getBlob", GetBlob); - NODE_SET_PROTOTYPE_METHOD(tpl, "getCommit", GetCommit); - NODE_SET_PROTOTYPE_METHOD(tpl, "createCommit", CreateCommit); - NODE_SET_PROTOTYPE_METHOD(tpl, "getObject", GetObject); - NODE_SET_PROTOTYPE_METHOD(tpl, "getReference", GetReference); - NODE_SET_PROTOTYPE_METHOD(tpl, "createSymbolicReference", CreateSymbolicReference); - NODE_SET_PROTOTYPE_METHOD(tpl, "createReference", CreateReference); - NODE_SET_PROTOTYPE_METHOD(tpl, "addRemote", AddRemote); - NODE_SET_PROTOTYPE_METHOD(tpl, "createRevWalk", CreateRevWalk); - NODE_SET_PROTOTYPE_METHOD(tpl, "getSubmodule", GetSubmodule); - NODE_SET_PROTOTYPE_METHOD(tpl, "addSubmodule", 
AddSubmodule); - NODE_SET_PROTOTYPE_METHOD(tpl, "getTag", GetTag); - NODE_SET_PROTOTYPE_METHOD(tpl, "createTag", CreateTag); - NODE_SET_METHOD(tpl, "createLightweightTag", CreateLightweightTag); - NODE_SET_PROTOTYPE_METHOD(tpl, "getTree", GetTree); - NODE_SET_METHOD(tpl, "reloadSubmodules", ReloadSubmodules); - NODE_SET_PROTOTYPE_METHOD(tpl, "delete", Delete); - NODE_SET_PROTOTYPE_METHOD(tpl, "getReferences", GetReferences); - NODE_SET_PROTOTYPE_METHOD(tpl, "createBlobFromBuffer", CreateBlobFromBuffer); - NODE_SET_PROTOTYPE_METHOD(tpl, "createBlobFromFile", CreateBlobFromFile); - NODE_SET_PROTOTYPE_METHOD(tpl, "getRemotes", GetRemotes); - NODE_SET_METHOD(tpl, "clone", Clone); - NODE_SET_PROTOTYPE_METHOD(tpl, "getRemote", GetRemote); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Repo"), constructor_template); -} - -Handle GitRepo::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_repository is required."))); - } - - GitRepo* object = new GitRepo((git_repository *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitRepo::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitRepo::constructor_template->NewInstance(1, argv)); -} - -git_repository *GitRepo::GetValue() { - return this->raw; -} - - -#include "../include/functions/copy.h" - -/** - * @param {String} path - * @param {Repository} callback - */ -Handle GitRepo::Open(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - OpenBaton* baton 
= new OpenBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->pathReference = Persistent::New(args[0]); - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - baton->path = from_path; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, OpenWork, (uv_after_work_cb)OpenAfterWork); - - return Undefined(); -} - -void GitRepo::OpenWork(uv_work_t *req) { - OpenBaton *baton = static_cast(req->data); - int result = git_repository_open( - &baton->out, - baton->path - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::OpenAfterWork(uv_work_t *req) { - HandleScope scope; - OpenBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitRepo::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->pathReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->path); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} path - * @param {Boolean} is_bare - * @param {Repository} callback - */ -Handle GitRepo::Init(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) 
{ - return ThrowException(Exception::Error(String::New("String path is required."))); - } - if (args.Length() == 1 || !args[1]->IsBoolean()) { - return ThrowException(Exception::Error(String::New("Boolean is_bare is required."))); - } - - if (args.Length() == 2 || !args[2]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - InitBaton* baton = new InitBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->pathReference = Persistent::New(args[0]); - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - baton->path = from_path; - baton->is_bareReference = Persistent::New(args[1]); - unsigned from_is_bare; - from_is_bare = (unsigned) args[1]->ToBoolean()->Value(); - baton->is_bare = from_is_bare; - baton->callback = Persistent::New(Local::Cast(args[2])); - - uv_queue_work(uv_default_loop(), &baton->request, InitWork, (uv_after_work_cb)InitAfterWork); - - return Undefined(); -} - -void GitRepo::InitWork(uv_work_t *req) { - InitBaton *baton = static_cast(req->data); - int result = git_repository_init( - &baton->out, - baton->path, - baton->is_bare - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::InitAfterWork(uv_work_t *req) { - HandleScope scope; - InitBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitRepo::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if 
(baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->pathReference.Dispose(); - baton->is_bareReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->path); - delete baton; -} - -/** - * @return {String} result - */ -Handle GitRepo::Path(const Arguments& args) { - HandleScope scope; - - - const char * result = git_repository_path( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitRepo::Workdir(const Arguments& args) { - HandleScope scope; - - - const char * result = git_repository_workdir( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {Odb} out - */ -Handle GitRepo::Odb(const Arguments& args) { - HandleScope scope; - - git_odb * out = 0; - - int result = git_repository_odb( - &out - , ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitOdb::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {Index} callback - */ -Handle GitRepo::openIndex(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - openIndexBaton* baton = new openIndexBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - 
baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, openIndexWork, (uv_after_work_cb)openIndexAfterWork); - - return Undefined(); -} - -void GitRepo::openIndexWork(uv_work_t *req) { - openIndexBaton *baton = static_cast(req->data); - int result = git_repository_index( - &baton->out, - baton->repo - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::openIndexAfterWork(uv_work_t *req) { - HandleScope scope; - openIndexBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitIndex::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Oid} id - * @param {Blob} callback - */ -Handle GitRepo::GetBlob(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return 
ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetBlobBaton* baton = new GetBlobBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->idReference = Persistent::New(args[0]); - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->id = from_id; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, GetBlobWork, (uv_after_work_cb)GetBlobAfterWork); - - return Undefined(); -} - -void GitRepo::GetBlobWork(uv_work_t *req) { - GetBlobBaton *baton = static_cast(req->data); - int result = git_blob_lookup( - &baton->blob, - baton->repo, - baton->id - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::GetBlobAfterWork(uv_work_t *req) { - HandleScope scope; - GetBlobBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->blob != NULL) { - to = GitBlob::New((void *)baton->blob); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->idReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} 
- -#include "../include/functions/copy.h" - -/** - * @param {Oid} id - * @param {Commit} callback - */ -Handle GitRepo::GetCommit(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetCommitBaton* baton = new GetCommitBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->idReference = Persistent::New(args[0]); - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->id = from_id; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, GetCommitWork, (uv_after_work_cb)GetCommitAfterWork); - - return Undefined(); -} - -void GitRepo::GetCommitWork(uv_work_t *req) { - GetCommitBaton *baton = static_cast(req->data); - int result = git_commit_lookup( - &baton->commit, - baton->repo, - baton->id - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::GetCommitAfterWork(uv_work_t *req) { - HandleScope scope; - GetCommitBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->commit != NULL) { - to = GitCommit::New((void *)baton->commit); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - 
baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->idReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} update_ref - * @param {Signature} author - * @param {Signature} committer - * @param {String} message_encoding - * @param {String} message - * @param {Tree} tree - * @param {Number} parent_count - * @param {Array} parents - * @param {Oid} callback - */ -Handle GitRepo::CreateCommit(const Arguments& args) { - HandleScope scope; - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Signature author is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - return ThrowException(Exception::Error(String::New("Signature committer is required."))); - } - if (args.Length() == 4 || !args[4]->IsString()) { - return ThrowException(Exception::Error(String::New("String message is required."))); - } - if (args.Length() == 5 || !args[5]->IsObject()) { - return ThrowException(Exception::Error(String::New("Tree tree is required."))); - } - if (args.Length() == 6 || !args[6]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number parent_count is required."))); - } - if (args.Length() == 7 || !args[7]->IsObject()) { - return ThrowException(Exception::Error(String::New("Array parents is required."))); - } - - if (args.Length() == 8 || !args[8]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - CreateCommitBaton* baton = new CreateCommitBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - 
baton->id = (git_oid *)malloc(sizeof(git_oid )); - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->update_refReference = Persistent::New(args[0]); - const char * from_update_ref; - if (args[0]->IsString()) { - String::Utf8Value update_ref(args[0]->ToString()); - from_update_ref = strdup(*update_ref); - } else { - from_update_ref = 0; - } - baton->update_ref = from_update_ref; - baton->authorReference = Persistent::New(args[1]); - const git_signature * from_author; - from_author = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - baton->author = from_author; - baton->committerReference = Persistent::New(args[2]); - const git_signature * from_committer; - from_committer = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - baton->committer = from_committer; - baton->message_encodingReference = Persistent::New(args[3]); - const char * from_message_encoding; - if (args[3]->IsString()) { - String::Utf8Value message_encoding(args[3]->ToString()); - from_message_encoding = strdup(*message_encoding); - } else { - from_message_encoding = 0; - } - baton->message_encoding = from_message_encoding; - baton->messageReference = Persistent::New(args[4]); - const char * from_message; - String::Utf8Value message(args[4]->ToString()); - from_message = strdup(*message); - baton->message = from_message; - baton->treeReference = Persistent::New(args[5]); - const git_tree * from_tree; - from_tree = ObjectWrap::Unwrap(args[5]->ToObject())->GetValue(); - baton->tree = from_tree; - baton->parent_countReference = Persistent::New(args[6]); - int from_parent_count; - from_parent_count = (int) args[6]->ToInt32()->Value(); - baton->parent_count = from_parent_count; - baton->parentsReference = Persistent::New(args[7]); - const git_commit ** from_parents; - Array *tmp_parents = Array::Cast(*args[7]); - from_parents = (const git_commit **)malloc(tmp_parents->Length() * sizeof(const git_commit *)); - for (unsigned int 
i = 0; i < tmp_parents->Length(); i++) { - - from_parents[i] = ObjectWrap::Unwrap(tmp_parents->Get(Number::New(static_cast(i)))->ToObject())->GetValue(); - } - baton->parents = from_parents; - baton->callback = Persistent::New(Local::Cast(args[8])); - - uv_queue_work(uv_default_loop(), &baton->request, CreateCommitWork, (uv_after_work_cb)CreateCommitAfterWork); - - return Undefined(); -} - -void GitRepo::CreateCommitWork(uv_work_t *req) { - CreateCommitBaton *baton = static_cast(req->data); - int result = git_commit_create( - baton->id, - baton->repo, - baton->update_ref, - baton->author, - baton->committer, - baton->message_encoding, - baton->message, - baton->tree, - baton->parent_count, - baton->parents - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::CreateCommitAfterWork(uv_work_t *req) { - HandleScope scope; - CreateCommitBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->id != NULL) { - to = GitOid::New((void *)baton->id); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->id); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->update_refReference.Dispose(); - baton->authorReference.Dispose(); - baton->committerReference.Dispose(); - baton->message_encodingReference.Dispose(); - baton->messageReference.Dispose(); - 
baton->treeReference.Dispose(); - baton->parent_countReference.Dispose(); - baton->parentsReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->update_ref); - free((void *)baton->message_encoding); - free((void *)baton->message); - free((void *)baton->parents); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Oid} id - * @param {Number} type - * @param {Object} callback - */ -Handle GitRepo::GetObject(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - if (args.Length() == 1 || !args[1]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number type is required."))); - } - - if (args.Length() == 2 || !args[2]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetObjectBaton* baton = new GetObjectBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->idReference = Persistent::New(args[0]); - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->id = from_id; - baton->typeReference = Persistent::New(args[1]); - git_otype from_type; - from_type = (git_otype) args[1]->ToInt32()->Value(); - baton->type = from_type; - baton->callback = Persistent::New(Local::Cast(args[2])); - - uv_queue_work(uv_default_loop(), &baton->request, GetObjectWork, (uv_after_work_cb)GetObjectAfterWork); - - return Undefined(); -} - -void GitRepo::GetObjectWork(uv_work_t *req) { - GetObjectBaton *baton = static_cast(req->data); - int result = git_object_lookup( - &baton->object, - baton->repo, - baton->id, - baton->type - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = 
git_error_dup(giterr_last()); - } -} - -void GitRepo::GetObjectAfterWork(uv_work_t *req) { - HandleScope scope; - GetObjectBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->object != NULL) { - to = GitObject::New((void *)baton->object); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->idReference.Dispose(); - baton->typeReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} name - * @param {Reference} callback - */ -Handle GitRepo::GetReference(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String name is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetReferenceBaton* baton = new GetReferenceBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->nameReference = Persistent::New(args[0]); - const char * from_name; - String::Utf8Value name(args[0]->ToString()); - from_name = strdup(*name); - baton->name = from_name; 
- baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, GetReferenceWork, (uv_after_work_cb)GetReferenceAfterWork); - - return Undefined(); -} - -void GitRepo::GetReferenceWork(uv_work_t *req) { - GetReferenceBaton *baton = static_cast(req->data); - int result = git_reference_lookup( - &baton->out, - baton->repo, - baton->name - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::GetReferenceAfterWork(uv_work_t *req) { - HandleScope scope; - GetReferenceBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitReference::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->nameReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->name); - delete baton; -} - -/** - * @param {String} name - * @param {String} target - * @param {Number} force - * @return {Reference} out - */ -Handle GitRepo::CreateSymbolicReference(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String name is required."))); - } - if (args.Length() == 1 || !args[1]->IsString()) { - return 
ThrowException(Exception::Error(String::New("String target is required."))); - } - if (args.Length() == 2 || !args[2]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number force is required."))); - } - - git_reference * out = 0; - const char * from_name; - String::Utf8Value name(args[0]->ToString()); - from_name = strdup(*name); - const char * from_target; - String::Utf8Value target(args[1]->ToString()); - from_target = strdup(*target); - int from_force; - from_force = (int) args[2]->ToInt32()->Value(); - - int result = git_reference_symbolic_create( - &out - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_name - , from_target - , from_force - ); - free((void *)from_name); - free((void *)from_target); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitReference::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {String} name - * @param {Oid} id - * @param {Number} force - * @return {Reference} out - */ -Handle GitRepo::CreateReference(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String name is required."))); - } - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - if (args.Length() == 2 || !args[2]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number force is required."))); - } - - git_reference * out = 0; - const char * from_name; - String::Utf8Value name(args[0]->ToString()); - from_name = strdup(*name); - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - int from_force; - from_force = (int) args[2]->ToInt32()->Value(); - - int 
result = git_reference_create( - &out - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_name - , from_id - , from_force - ); - free((void *)from_name); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitReference::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {String} name - * @param {String} url - * @param {Remote} callback - */ -Handle GitRepo::AddRemote(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String name is required."))); - } - if (args.Length() == 1 || !args[1]->IsString()) { - return ThrowException(Exception::Error(String::New("String url is required."))); - } - - if (args.Length() == 2 || !args[2]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - AddRemoteBaton* baton = new AddRemoteBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->nameReference = Persistent::New(args[0]); - const char * from_name; - String::Utf8Value name(args[0]->ToString()); - from_name = strdup(*name); - baton->name = from_name; - baton->urlReference = Persistent::New(args[1]); - const char * from_url; - String::Utf8Value url(args[1]->ToString()); - from_url = strdup(*url); - baton->url = from_url; - baton->callback = Persistent::New(Local::Cast(args[2])); - - uv_queue_work(uv_default_loop(), &baton->request, AddRemoteWork, (uv_after_work_cb)AddRemoteAfterWork); - - return Undefined(); -} - -void GitRepo::AddRemoteWork(uv_work_t 
*req) { - AddRemoteBaton *baton = static_cast(req->data); - int result = git_remote_create( - &baton->out, - baton->repo, - baton->name, - baton->url - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::AddRemoteAfterWork(uv_work_t *req) { - HandleScope scope; - AddRemoteBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitRemote::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->nameReference.Dispose(); - baton->urlReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->name); - free((void *)baton->url); - delete baton; -} - -/** - * @return {RevWalk} out - */ -Handle GitRepo::CreateRevWalk(const Arguments& args) { - HandleScope scope; - - git_revwalk * out = 0; - - int result = git_revwalk_new( - &out - , ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitRevWalk::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {String} name 
- * @return {Submodule} submodule - */ -Handle GitRepo::GetSubmodule(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String name is required."))); - } - - git_submodule * submodule = 0; - const char * from_name; - String::Utf8Value name(args[0]->ToString()); - from_name = strdup(*name); - - int result = git_submodule_lookup( - &submodule - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_name - ); - free((void *)from_name); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (submodule != NULL) { - to = GitSubmodule::New((void *)submodule); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {String} url - * @param {String} path - * @param {Number} use_gitlink - * @return {Submodule} submodule - */ -Handle GitRepo::AddSubmodule(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String url is required."))); - } - if (args.Length() == 1 || !args[1]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - if (args.Length() == 2 || !args[2]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number use_gitlink is required."))); - } - - git_submodule * submodule = 0; - const char * from_url; - String::Utf8Value url(args[0]->ToString()); - from_url = strdup(*url); - const char * from_path; - String::Utf8Value path(args[1]->ToString()); - from_path = strdup(*path); - int from_use_gitlink; - from_use_gitlink = (int) args[2]->ToInt32()->Value(); - - int result = git_submodule_add_setup( - &submodule - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_url - , from_path - , from_use_gitlink - 
); - free((void *)from_url); - free((void *)from_path); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (submodule != NULL) { - to = GitSubmodule::New((void *)submodule); - } else { - to = Null(); - } - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {Oid} id - * @param {Tag} callback - */ -Handle GitRepo::GetTag(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetTagBaton* baton = new GetTagBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->idReference = Persistent::New(args[0]); - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->id = from_id; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, GetTagWork, (uv_after_work_cb)GetTagAfterWork); - - return Undefined(); -} - -void GitRepo::GetTagWork(uv_work_t *req) { - GetTagBaton *baton = static_cast(req->data); - int result = git_tag_lookup( - &baton->out, - baton->repo, - baton->id - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::GetTagAfterWork(uv_work_t *req) { - HandleScope scope; - GetTagBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if 
(baton->out != NULL) { - to = GitTag::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->idReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} tag_name - * @param {Object} target - * @param {Signature} tagger - * @param {String} message - * @param {Number} force - * @param {Oid} callback - */ -Handle GitRepo::CreateTag(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String tag_name is required."))); - } - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Object target is required."))); - } - if (args.Length() == 2 || !args[2]->IsObject()) { - return ThrowException(Exception::Error(String::New("Signature tagger is required."))); - } - if (args.Length() == 3 || !args[3]->IsString()) { - return ThrowException(Exception::Error(String::New("String message is required."))); - } - if (args.Length() == 4 || !args[4]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number force is required."))); - } - - if (args.Length() == 5 || !args[5]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - CreateTagBaton* baton = new 
CreateTagBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->oid = (git_oid *)malloc(sizeof(git_oid )); - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->tag_nameReference = Persistent::New(args[0]); - const char * from_tag_name; - String::Utf8Value tag_name(args[0]->ToString()); - from_tag_name = strdup(*tag_name); - baton->tag_name = from_tag_name; - baton->targetReference = Persistent::New(args[1]); - const git_object * from_target; - from_target = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - baton->target = from_target; - baton->taggerReference = Persistent::New(args[2]); - const git_signature * from_tagger; - from_tagger = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - baton->tagger = from_tagger; - baton->messageReference = Persistent::New(args[3]); - const char * from_message; - String::Utf8Value message(args[3]->ToString()); - from_message = strdup(*message); - baton->message = from_message; - baton->forceReference = Persistent::New(args[4]); - int from_force; - from_force = (int) args[4]->ToInt32()->Value(); - baton->force = from_force; - baton->callback = Persistent::New(Local::Cast(args[5])); - - uv_queue_work(uv_default_loop(), &baton->request, CreateTagWork, (uv_after_work_cb)CreateTagAfterWork); - - return Undefined(); -} - -void GitRepo::CreateTagWork(uv_work_t *req) { - CreateTagBaton *baton = static_cast(req->data); - int result = git_tag_create( - baton->oid, - baton->repo, - baton->tag_name, - baton->target, - baton->tagger, - baton->message, - baton->force - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::CreateTagAfterWork(uv_work_t *req) { - HandleScope scope; - CreateTagBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->oid 
!= NULL) { - to = GitOid::New((void *)baton->oid); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->oid); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->tag_nameReference.Dispose(); - baton->targetReference.Dispose(); - baton->taggerReference.Dispose(); - baton->messageReference.Dispose(); - baton->forceReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->tag_name); - free((void *)baton->message); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} tag_name - * @param {Object} target - * @param {Number} force - * @param {Oid} callback - */ -Handle GitRepo::CreateLightweightTag(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String tag_name is required."))); - } - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Object target is required."))); - } - if (args.Length() == 2 || !args[2]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number force is required."))); - } - - if (args.Length() == 3 || !args[3]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - CreateLightweightTagBaton* baton = new CreateLightweightTagBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = 
baton; - baton->oid = (git_oid *)malloc(sizeof(git_oid )); - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->tag_nameReference = Persistent::New(args[0]); - const char * from_tag_name; - String::Utf8Value tag_name(args[0]->ToString()); - from_tag_name = strdup(*tag_name); - baton->tag_name = from_tag_name; - baton->targetReference = Persistent::New(args[1]); - const git_object * from_target; - from_target = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - baton->target = from_target; - baton->forceReference = Persistent::New(args[2]); - int from_force; - from_force = (int) args[2]->ToInt32()->Value(); - baton->force = from_force; - baton->callback = Persistent::New(Local::Cast(args[3])); - - uv_queue_work(uv_default_loop(), &baton->request, CreateLightweightTagWork, (uv_after_work_cb)CreateLightweightTagAfterWork); - - return Undefined(); -} - -void GitRepo::CreateLightweightTagWork(uv_work_t *req) { - CreateLightweightTagBaton *baton = static_cast(req->data); - int result = git_tag_create_lightweight( - baton->oid, - baton->repo, - baton->tag_name, - baton->target, - baton->force - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::CreateLightweightTagAfterWork(uv_work_t *req) { - HandleScope scope; - CreateLightweightTagBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->oid != NULL) { - to = GitOid::New((void *)baton->oid); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - 
free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->oid); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->tag_nameReference.Dispose(); - baton->targetReference.Dispose(); - baton->forceReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->tag_name); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Oid} id - * @param {Tree} callback - */ -Handle GitRepo::GetTree(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetTreeBaton* baton = new GetTreeBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->idReference = Persistent::New(args[0]); - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->id = from_id; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, GetTreeWork, (uv_after_work_cb)GetTreeAfterWork); - - return Undefined(); -} - -void GitRepo::GetTreeWork(uv_work_t *req) { - GetTreeBaton *baton = static_cast(req->data); - int result = git_tree_lookup( - &baton->out, - baton->repo, - baton->id - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::GetTreeAfterWork(uv_work_t *req) { - HandleScope scope; - GetTreeBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if 
(baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitTree::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->idReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - */ -Handle GitRepo::ReloadSubmodules(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - ReloadSubmodulesBaton* baton = new ReloadSubmodulesBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, ReloadSubmodulesWork, (uv_after_work_cb)ReloadSubmodulesAfterWork); - - return Undefined(); -} - -void GitRepo::ReloadSubmodulesWork(uv_work_t *req) { - ReloadSubmodulesBaton *baton = static_cast(req->data); - int result = git_submodule_reload_all( - baton->repo - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::ReloadSubmodulesAfterWork(uv_work_t *req) { - HandleScope scope; 
- ReloadSubmodulesBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} tag_name - */ -Handle GitRepo::Delete(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String tag_name is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - DeleteBaton* baton = new DeleteBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->tag_nameReference = Persistent::New(args[0]); - const char * from_tag_name; - String::Utf8Value tag_name(args[0]->ToString()); - from_tag_name = strdup(*tag_name); - baton->tag_name = from_tag_name; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, DeleteWork, (uv_after_work_cb)DeleteAfterWork); - - return Undefined(); -} - -void GitRepo::DeleteWork(uv_work_t *req) { - DeleteBaton *baton = static_cast(req->data); - 
int result = git_tag_delete( - baton->repo, - baton->tag_name - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::DeleteAfterWork(uv_work_t *req) { - HandleScope scope; - DeleteBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->tag_nameReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->tag_name); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Number} list_flags - * @param {Array} callback - */ -Handle GitRepo::GetReferences(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetReferencesBaton* baton = new GetReferencesBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->array = (git_strarray *)malloc(sizeof(git_strarray )); - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->list_flagsReference = Persistent::New(args[0]); - unsigned int from_list_flags; - if (args[0]->IsUint32()) { - from_list_flags = (unsigned int) args[0]->ToUint32()->Value(); - } 
else { - from_list_flags = 0; - } - baton->list_flags = from_list_flags; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, GetReferencesWork, (uv_after_work_cb)GetReferencesAfterWork); - - return Undefined(); -} - -void GitRepo::GetReferencesWork(uv_work_t *req) { - GetReferencesBaton *baton = static_cast(req->data); - int result = git_reference_list( - baton->array, - baton->repo, - baton->list_flags - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::GetReferencesAfterWork(uv_work_t *req) { - HandleScope scope; - GetReferencesBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - - Local tmpArray = Array::New(baton->array->count); - for (unsigned int i = 0; i < baton->array->count; i++) { - tmpArray->Set(Number::New(i), String::New(baton->array->strings[i])); - } - to = tmpArray; - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->array); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->list_flagsReference.Dispose(); - baton->callback.Dispose(); - - git_strarray_free(baton->array); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Buffer} buffer - * @param {Number} len - * @param {Oid} callback - */ -Handle GitRepo::CreateBlobFromBuffer(const Arguments& args) { - HandleScope scope; 
- if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Buffer buffer is required."))); - } - if (args.Length() == 1 || !args[1]->IsNumber()) { - return ThrowException(Exception::Error(String::New("Number len is required."))); - } - - if (args.Length() == 2 || !args[2]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - CreateBlobFromBufferBaton* baton = new CreateBlobFromBufferBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->oid = (git_oid *)malloc(sizeof(git_oid )); - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->bufferReference = Persistent::New(args[0]); - const void * from_buffer; - from_buffer = Buffer::Data(args[0]->ToObject()); - baton->buffer = from_buffer; - baton->lenReference = Persistent::New(args[1]); - size_t from_len; - from_len = (size_t) args[1]->ToNumber()->Value(); - baton->len = from_len; - baton->callback = Persistent::New(Local::Cast(args[2])); - - uv_queue_work(uv_default_loop(), &baton->request, CreateBlobFromBufferWork, (uv_after_work_cb)CreateBlobFromBufferAfterWork); - - return Undefined(); -} - -void GitRepo::CreateBlobFromBufferWork(uv_work_t *req) { - CreateBlobFromBufferBaton *baton = static_cast(req->data); - int result = git_blob_create_frombuffer( - baton->oid, - baton->repo, - baton->buffer, - baton->len - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::CreateBlobFromBufferAfterWork(uv_work_t *req) { - HandleScope scope; - CreateBlobFromBufferBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->oid != NULL) { - to = GitOid::New((void *)baton->oid); - } else { - to = Null(); - } - Handle result = to; - 
Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->oid); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->bufferReference.Dispose(); - baton->lenReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} path - * @param {Oid} callback - */ -Handle GitRepo::CreateBlobFromFile(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - CreateBlobFromFileBaton* baton = new CreateBlobFromFileBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->id = (git_oid *)malloc(sizeof(git_oid )); - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->pathReference = Persistent::New(args[0]); - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - baton->path = from_path; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, CreateBlobFromFileWork, (uv_after_work_cb)CreateBlobFromFileAfterWork); - - return Undefined(); -} - -void GitRepo::CreateBlobFromFileWork(uv_work_t *req) { - 
CreateBlobFromFileBaton *baton = static_cast(req->data); - int result = git_blob_create_fromdisk( - baton->id, - baton->repo, - baton->path - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::CreateBlobFromFileAfterWork(uv_work_t *req) { - HandleScope scope; - CreateBlobFromFileBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->id != NULL) { - to = GitOid::New((void *)baton->id); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->id); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->pathReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->path); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Array} callback - */ -Handle GitRepo::GetRemotes(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetRemotesBaton* baton = new GetRemotesBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->out = (git_strarray *)malloc(sizeof(git_strarray )); - baton->repoReference = Persistent::New(args.This()); - baton->repo = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = 
Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, GetRemotesWork, (uv_after_work_cb)GetRemotesAfterWork); - - return Undefined(); -} - -void GitRepo::GetRemotesWork(uv_work_t *req) { - GetRemotesBaton *baton = static_cast(req->data); - int result = git_remote_list( - baton->out, - baton->repo - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::GetRemotesAfterWork(uv_work_t *req) { - HandleScope scope; - GetRemotesBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - - Local tmpArray = Array::New(baton->out->count); - for (unsigned int i = 0; i < baton->out->count; i++) { - tmpArray->Set(Number::New(i), String::New(baton->out->strings[i])); - } - to = tmpArray; - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->out); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->callback.Dispose(); - - git_strarray_free(baton->out); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} url - * @param {String} local_path - * @param {CloneOptions} options - * @param {Repository} callback - */ -Handle GitRepo::Clone(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String url is required."))); - } - if 
(args.Length() == 1 || !args[1]->IsString()) { - return ThrowException(Exception::Error(String::New("String local_path is required."))); - } - - if (args.Length() == 3 || !args[3]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - CloneBaton* baton = new CloneBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->urlReference = Persistent::New(args[0]); - const char * from_url; - String::Utf8Value url(args[0]->ToString()); - from_url = strdup(*url); - baton->url = from_url; - baton->local_pathReference = Persistent::New(args[1]); - const char * from_local_path; - String::Utf8Value local_path(args[1]->ToString()); - from_local_path = strdup(*local_path); - baton->local_path = from_local_path; - baton->optionsReference = Persistent::New(args[2]); - const git_clone_options * from_options; - if (args[2]->IsObject()) { - from_options = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - } else { - from_options = 0; - } - baton->options = from_options; - baton->callback = Persistent::New(Local::Cast(args[3])); - - uv_queue_work(uv_default_loop(), &baton->request, CloneWork, (uv_after_work_cb)CloneAfterWork); - - return Undefined(); -} - -void GitRepo::CloneWork(uv_work_t *req) { - CloneBaton *baton = static_cast(req->data); - int result = git_clone( - &baton->out, - baton->url, - baton->local_path, - baton->options - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRepo::CloneAfterWork(uv_work_t *req) { - HandleScope scope; - CloneBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitRepo::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - 
baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->urlReference.Dispose(); - baton->local_pathReference.Dispose(); - baton->optionsReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->url); - free((void *)baton->local_path); - delete baton; -} - -/** - * @param {String} name - * @return {Remote} out - */ -Handle GitRepo::GetRemote(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String name is required."))); - } - - git_remote * out = 0; - const char * from_name; - String::Utf8Value name(args[0]->ToString()); - from_name = strdup(*name); - - int result = git_remote_load( - &out - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_name - ); - free((void *)from_name); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitRemote::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -Persistent GitRepo::constructor_template; diff --git a/src/revwalk.cc b/src/revwalk.cc deleted file mode 100755 index 20d7a940b..000000000 --- a/src/revwalk.cc +++ /dev/null @@ -1,808 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/revwalk.h" -#include "../include/oid.h" -#include "../include/repo.h" - -using namespace v8; -using namespace node; - -GitRevWalk::GitRevWalk(git_revwalk *raw) { - this->raw = raw; -} - -GitRevWalk::~GitRevWalk() { - git_revwalk_free(this->raw); -} - -void GitRevWalk::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("RevWalk")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "reset", Reset); - NODE_SET_PROTOTYPE_METHOD(tpl, "push", Push); - NODE_SET_PROTOTYPE_METHOD(tpl, "pushGlob", PushGlob); - NODE_SET_PROTOTYPE_METHOD(tpl, "pushHead", PushHead); - NODE_SET_PROTOTYPE_METHOD(tpl, "hide", Hide); - NODE_SET_PROTOTYPE_METHOD(tpl, "hideGlob", HideGlob); - NODE_SET_PROTOTYPE_METHOD(tpl, "hideHead", HideHead); - NODE_SET_PROTOTYPE_METHOD(tpl, "pushRef", PushRef); - NODE_SET_PROTOTYPE_METHOD(tpl, "hideRef", HideRef); - NODE_SET_PROTOTYPE_METHOD(tpl, "next", Next); - NODE_SET_PROTOTYPE_METHOD(tpl, "sorting", Sorting); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("RevWalk"), constructor_template); -} - -Handle GitRevWalk::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_revwalk is required."))); - } - - GitRevWalk* object = new GitRevWalk((git_revwalk *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitRevWalk::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitRevWalk::constructor_template->NewInstance(1, argv)); -} - -git_revwalk *GitRevWalk::GetValue() { - return this->raw; -} - - -/** - */ -Handle GitRevWalk::Reset(const Arguments& args) { 
- HandleScope scope; - - - git_revwalk_reset( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - return Undefined(); -} - -#include "../include/functions/copy.h" - -/** - * @param {Oid} id - */ -Handle GitRevWalk::Push(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - PushBaton* baton = new PushBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->walkReference = Persistent::New(args.This()); - baton->walk = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->idReference = Persistent::New(args[0]); - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->id = from_id; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, PushWork, (uv_after_work_cb)PushAfterWork); - - return Undefined(); -} - -void GitRevWalk::PushWork(uv_work_t *req) { - PushBaton *baton = static_cast(req->data); - int result = git_revwalk_push( - baton->walk, - baton->id - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRevWalk::PushAfterWork(uv_work_t *req) { - HandleScope scope; - PushBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if 
(baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->walkReference.Dispose(); - baton->idReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} glob - */ -Handle GitRevWalk::PushGlob(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String glob is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - PushGlobBaton* baton = new PushGlobBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->walkReference = Persistent::New(args.This()); - baton->walk = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->globReference = Persistent::New(args[0]); - const char * from_glob; - String::Utf8Value glob(args[0]->ToString()); - from_glob = strdup(*glob); - baton->glob = from_glob; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, PushGlobWork, (uv_after_work_cb)PushGlobAfterWork); - - return Undefined(); -} - -void GitRevWalk::PushGlobWork(uv_work_t *req) { - PushGlobBaton *baton = static_cast(req->data); - int result = git_revwalk_push_glob( - baton->walk, - baton->glob - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRevWalk::PushGlobAfterWork(uv_work_t *req) { - HandleScope scope; - PushGlobBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { 
- Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->walkReference.Dispose(); - baton->globReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->glob); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - */ -Handle GitRevWalk::PushHead(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - PushHeadBaton* baton = new PushHeadBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->walkReference = Persistent::New(args.This()); - baton->walk = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, PushHeadWork, (uv_after_work_cb)PushHeadAfterWork); - - return Undefined(); -} - -void GitRevWalk::PushHeadWork(uv_work_t *req) { - PushHeadBaton *baton = static_cast(req->data); - int result = git_revwalk_push_head( - baton->walk - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRevWalk::PushHeadAfterWork(uv_work_t *req) { - HandleScope scope; - PushHeadBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - 
baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->walkReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Oid} commit_id - */ -Handle GitRevWalk::Hide(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid commit_id is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - HideBaton* baton = new HideBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->walkReference = Persistent::New(args.This()); - baton->walk = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->commit_idReference = Persistent::New(args[0]); - const git_oid * from_commit_id; - from_commit_id = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->commit_id = from_commit_id; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, HideWork, (uv_after_work_cb)HideAfterWork); - - return Undefined(); -} - -void GitRevWalk::HideWork(uv_work_t *req) { - HideBaton *baton = static_cast(req->data); - int result = git_revwalk_hide( - baton->walk, - baton->commit_id - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void 
GitRevWalk::HideAfterWork(uv_work_t *req) { - HandleScope scope; - HideBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->walkReference.Dispose(); - baton->commit_idReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} glob - */ -Handle GitRevWalk::HideGlob(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String glob is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - HideGlobBaton* baton = new HideGlobBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->walkReference = Persistent::New(args.This()); - baton->walk = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->globReference = Persistent::New(args[0]); - const char * from_glob; - String::Utf8Value glob(args[0]->ToString()); - from_glob = strdup(*glob); - baton->glob = from_glob; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, HideGlobWork, (uv_after_work_cb)HideGlobAfterWork); - - return Undefined(); -} - -void 
GitRevWalk::HideGlobWork(uv_work_t *req) { - HideGlobBaton *baton = static_cast(req->data); - int result = git_revwalk_hide_glob( - baton->walk, - baton->glob - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRevWalk::HideGlobAfterWork(uv_work_t *req) { - HandleScope scope; - HideGlobBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->walkReference.Dispose(); - baton->globReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->glob); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - */ -Handle GitRevWalk::HideHead(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - HideHeadBaton* baton = new HideHeadBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->walkReference = Persistent::New(args.This()); - baton->walk = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, HideHeadWork, (uv_after_work_cb)HideHeadAfterWork); - - return Undefined(); -} - -void 
GitRevWalk::HideHeadWork(uv_work_t *req) { - HideHeadBaton *baton = static_cast(req->data); - int result = git_revwalk_hide_head( - baton->walk - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRevWalk::HideHeadAfterWork(uv_work_t *req) { - HandleScope scope; - HideHeadBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->walkReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} refname - */ -Handle GitRevWalk::PushRef(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String refname is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - PushRefBaton* baton = new PushRefBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->walkReference = Persistent::New(args.This()); - baton->walk = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->refnameReference = Persistent::New(args[0]); - const char * from_refname; - String::Utf8Value refname(args[0]->ToString()); - 
from_refname = strdup(*refname); - baton->refname = from_refname; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, PushRefWork, (uv_after_work_cb)PushRefAfterWork); - - return Undefined(); -} - -void GitRevWalk::PushRefWork(uv_work_t *req) { - PushRefBaton *baton = static_cast(req->data); - int result = git_revwalk_push_ref( - baton->walk, - baton->refname - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRevWalk::PushRefAfterWork(uv_work_t *req) { - HandleScope scope; - PushRefBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->walkReference.Dispose(); - baton->refnameReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->refname); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {String} refname - */ -Handle GitRevWalk::HideRef(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String refname is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - HideRefBaton* baton = new 
HideRefBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->walkReference = Persistent::New(args.This()); - baton->walk = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->refnameReference = Persistent::New(args[0]); - const char * from_refname; - String::Utf8Value refname(args[0]->ToString()); - from_refname = strdup(*refname); - baton->refname = from_refname; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, HideRefWork, (uv_after_work_cb)HideRefAfterWork); - - return Undefined(); -} - -void GitRevWalk::HideRefWork(uv_work_t *req) { - HideRefBaton *baton = static_cast(req->data); - int result = git_revwalk_hide_ref( - baton->walk, - baton->refname - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRevWalk::HideRefAfterWork(uv_work_t *req) { - HandleScope scope; - HideRefBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->walkReference.Dispose(); - baton->refnameReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->refname); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Oid} callback - */ -Handle GitRevWalk::Next(const Arguments& args) { - HandleScope 
scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - NextBaton* baton = new NextBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->out = (git_oid *)malloc(sizeof(git_oid )); - baton->walkReference = Persistent::New(args.This()); - baton->walk = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, NextWork, (uv_after_work_cb)NextAfterWork); - - return Undefined(); -} - -void GitRevWalk::NextWork(uv_work_t *req) { - NextBaton *baton = static_cast(req->data); - int result = git_revwalk_next( - baton->out, - baton->walk - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitRevWalk::NextAfterWork(uv_work_t *req) { - HandleScope scope; - NextBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitOid::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->out); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->walkReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -/** - * @param {Number} sort_mode - */ -Handle GitRevWalk::Sorting(const Arguments& 
args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number sort_mode is required."))); - } - - unsigned int from_sort_mode; - from_sort_mode = (unsigned int) args[0]->ToUint32()->Value(); - - git_revwalk_sorting( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_sort_mode - ); - - return Undefined(); -} - -Persistent GitRevWalk::constructor_template; diff --git a/src/signature.cc b/src/signature.cc deleted file mode 100755 index d355896eb..000000000 --- a/src/signature.cc +++ /dev/null @@ -1,215 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/signature.h" -#include "../include/time.h" - -using namespace v8; -using namespace node; - -GitSignature::GitSignature(git_signature *raw) { - this->raw = raw; -} - -GitSignature::~GitSignature() { - git_signature_free(this->raw); -} - -void GitSignature::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Signature")); - - NODE_SET_METHOD(tpl, "create", Create); - NODE_SET_METHOD(tpl, "now", Now); - - NODE_SET_PROTOTYPE_METHOD(tpl, "name", Name); - NODE_SET_PROTOTYPE_METHOD(tpl, "email", Email); - NODE_SET_PROTOTYPE_METHOD(tpl, "time", Time); - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Signature"), constructor_template); -} - -Handle GitSignature::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_signature is required."))); - } - - GitSignature* object = new GitSignature((git_signature *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return 
scope.Close(args.This()); -} - -Handle GitSignature::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitSignature::constructor_template->NewInstance(1, argv)); -} - -git_signature *GitSignature::GetValue() { - return this->raw; -} - - -/** - * @param {String} name - * @param {String} email - * @param {Number} time - * @param {Number} offset - * @return {Signature} out - */ -Handle GitSignature::Create(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String name is required."))); - } - if (args.Length() == 1 || !args[1]->IsString()) { - return ThrowException(Exception::Error(String::New("String email is required."))); - } - if (args.Length() == 2 || !args[2]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number time is required."))); - } - if (args.Length() == 3 || !args[3]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number offset is required."))); - } - - git_signature * out = 0; - const char * from_name; - String::Utf8Value name(args[0]->ToString()); - from_name = strdup(*name); - const char * from_email; - String::Utf8Value email(args[1]->ToString()); - from_email = strdup(*email); - git_time_t from_time; - from_time = (git_time_t) args[2]->ToInt32()->Value(); - int from_offset; - from_offset = (int) args[3]->ToInt32()->Value(); - - int result = git_signature_new( - &out - , from_name - , from_email - , from_time - , from_offset - ); - free((void *)from_name); - free((void *)from_email); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitSignature::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param 
{String} name - * @param {String} email - * @return {Signature} out - */ -Handle GitSignature::Now(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String name is required."))); - } - if (args.Length() == 1 || !args[1]->IsString()) { - return ThrowException(Exception::Error(String::New("String email is required."))); - } - - git_signature * out = 0; - const char * from_name; - String::Utf8Value name(args[0]->ToString()); - from_name = strdup(*name); - const char * from_email; - String::Utf8Value email(args[1]->ToString()); - from_email = strdup(*email); - - int result = git_signature_now( - &out - , from_name - , from_email - ); - free((void *)from_name); - free((void *)from_email); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitSignature::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -Handle GitSignature::Name(const Arguments& args) { - HandleScope scope; - Handle to; - - const char * name = - ObjectWrap::Unwrap(args.This())->GetValue()->name; - - to = String::New(name); - return scope.Close(to); -} - -Handle GitSignature::Email(const Arguments& args) { - HandleScope scope; - Handle to; - - const char * email = - ObjectWrap::Unwrap(args.This())->GetValue()->email; - - to = String::New(email); - return scope.Close(to); -} - -Handle GitSignature::Time(const Arguments& args) { - HandleScope scope; - Handle to; - - git_time *when = - &ObjectWrap::Unwrap(args.This())->GetValue()->when; - - if (when != NULL) { - when = (git_time *)git_time_dup(when); - } - if (when != NULL) { - to = GitTime::New((void *)when); - } else { - to = Null(); - } - return scope.Close(to); -} - -Persistent GitSignature::constructor_template; diff 
--git a/src/submodule.cc b/src/submodule.cc deleted file mode 100644 index 2b355fc9c..000000000 --- a/src/submodule.cc +++ /dev/null @@ -1,792 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! - **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/submodule.h" -#include "../include/oid.h" -#include "../include/repo.h" - -using namespace v8; -using namespace node; - -GitSubmodule::GitSubmodule(git_submodule *raw) { - this->raw = raw; -} - -GitSubmodule::~GitSubmodule() { - free(this->raw); -} - -void GitSubmodule::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Submodule")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "addFinalize", AddFinalize); - NODE_SET_PROTOTYPE_METHOD(tpl, "addToIndex", AddToIndex); - NODE_SET_PROTOTYPE_METHOD(tpl, "save", Save); - NODE_SET_PROTOTYPE_METHOD(tpl, "name", Name); - NODE_SET_PROTOTYPE_METHOD(tpl, "path", Path); - NODE_SET_PROTOTYPE_METHOD(tpl, "url", Url); - NODE_SET_PROTOTYPE_METHOD(tpl, "setUrl", SetUrl); - NODE_SET_PROTOTYPE_METHOD(tpl, "indexId", IndexId); - NODE_SET_PROTOTYPE_METHOD(tpl, "headId", HeadId); - NODE_SET_PROTOTYPE_METHOD(tpl, "init", Init); - NODE_SET_PROTOTYPE_METHOD(tpl, "sync", Sync); - NODE_SET_PROTOTYPE_METHOD(tpl, "open", Open); - NODE_SET_PROTOTYPE_METHOD(tpl, "reload", Reload); - NODE_SET_PROTOTYPE_METHOD(tpl, "status", Status); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Submodule"), constructor_template); -} - -Handle GitSubmodule::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_submodule is required."))); - } - - GitSubmodule* object = new GitSubmodule((git_submodule *) 
External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitSubmodule::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitSubmodule::constructor_template->NewInstance(1, argv)); -} - -git_submodule *GitSubmodule::GetValue() { - return this->raw; -} - - -#include "../include/functions/copy.h" - -/** - */ -Handle GitSubmodule::AddFinalize(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - AddFinalizeBaton* baton = new AddFinalizeBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->submoduleReference = Persistent::New(args.This()); - baton->submodule = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, AddFinalizeWork, (uv_after_work_cb)AddFinalizeAfterWork); - - return Undefined(); -} - -void GitSubmodule::AddFinalizeWork(uv_work_t *req) { - AddFinalizeBaton *baton = static_cast(req->data); - int result = git_submodule_add_finalize( - baton->submodule - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitSubmodule::AddFinalizeAfterWork(uv_work_t *req) { - HandleScope scope; - AddFinalizeBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if 
(baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->submoduleReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Number} write_index - */ -Handle GitSubmodule::AddToIndex(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number write_index is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - AddToIndexBaton* baton = new AddToIndexBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->submoduleReference = Persistent::New(args.This()); - baton->submodule = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->write_indexReference = Persistent::New(args[0]); - int from_write_index; - from_write_index = (int) args[0]->ToInt32()->Value(); - baton->write_index = from_write_index; - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, AddToIndexWork, (uv_after_work_cb)AddToIndexAfterWork); - - return Undefined(); -} - -void GitSubmodule::AddToIndexWork(uv_work_t *req) { - AddToIndexBaton *baton = static_cast(req->data); - int result = git_submodule_add_to_index( - baton->submodule, - baton->write_index - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitSubmodule::AddToIndexAfterWork(uv_work_t *req) { - HandleScope scope; - AddToIndexBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = 
Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->submoduleReference.Dispose(); - baton->write_indexReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - */ -Handle GitSubmodule::Save(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - SaveBaton* baton = new SaveBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->submoduleReference = Persistent::New(args.This()); - baton->submodule = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, SaveWork, (uv_after_work_cb)SaveAfterWork); - - return Undefined(); -} - -void GitSubmodule::SaveWork(uv_work_t *req) { - SaveBaton *baton = static_cast(req->data); - int result = git_submodule_save( - baton->submodule - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitSubmodule::SaveAfterWork(uv_work_t *req) { - HandleScope scope; - SaveBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - 
result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->submoduleReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -/** - * @return {String} result - */ -Handle GitSubmodule::Name(const Arguments& args) { - HandleScope scope; - - - const char * result = git_submodule_name( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitSubmodule::Path(const Arguments& args) { - HandleScope scope; - - - const char * result = git_submodule_path( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitSubmodule::Url(const Arguments& args) { - HandleScope scope; - - - const char * result = git_submodule_url( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @param {String} url - */ -Handle GitSubmodule::SetUrl(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String url is required."))); - } - - const char * from_url; - String::Utf8Value url(args[0]->ToString()); - from_url = strdup(*url); - - int result = git_submodule_set_url( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_url - ); - free((void *)from_url); - if (result != GIT_OK) { - if (giterr_last()) { - return 
ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - * @return {Oid} result - */ -Handle GitSubmodule::IndexId(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_submodule_index_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {Oid} result - */ -Handle GitSubmodule::HeadId(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_submodule_head_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {Number} overwrite - */ -Handle GitSubmodule::Init(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number overwrite is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - InitBaton* baton = new InitBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->submoduleReference = Persistent::New(args.This()); - baton->submodule = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->overwriteReference = Persistent::New(args[0]); - int from_overwrite; - from_overwrite = (int) args[0]->ToInt32()->Value(); - baton->overwrite = from_overwrite; - baton->callback = Persistent::New(Local::Cast(args[1])); - - 
uv_queue_work(uv_default_loop(), &baton->request, InitWork, (uv_after_work_cb)InitAfterWork); - - return Undefined(); -} - -void GitSubmodule::InitWork(uv_work_t *req) { - InitBaton *baton = static_cast(req->data); - int result = git_submodule_init( - baton->submodule, - baton->overwrite - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitSubmodule::InitAfterWork(uv_work_t *req) { - HandleScope scope; - InitBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->submoduleReference.Dispose(); - baton->overwriteReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - */ -Handle GitSubmodule::Sync(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - SyncBaton* baton = new SyncBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->submoduleReference = Persistent::New(args.This()); - baton->submodule = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, 
SyncWork, (uv_after_work_cb)SyncAfterWork); - - return Undefined(); -} - -void GitSubmodule::SyncWork(uv_work_t *req) { - SyncBaton *baton = static_cast(req->data); - int result = git_submodule_sync( - baton->submodule - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitSubmodule::SyncAfterWork(uv_work_t *req) { - HandleScope scope; - SyncBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->submoduleReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Repository} callback - */ -Handle GitSubmodule::Open(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - OpenBaton* baton = new OpenBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->submoduleReference = Persistent::New(args.This()); - baton->submodule = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, OpenWork, (uv_after_work_cb)OpenAfterWork); - - return Undefined(); -} - -void 
GitSubmodule::OpenWork(uv_work_t *req) { - OpenBaton *baton = static_cast(req->data); - int result = git_submodule_open( - &baton->repo, - baton->submodule - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitSubmodule::OpenAfterWork(uv_work_t *req) { - HandleScope scope; - OpenBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->repo != NULL) { - to = GitRepo::New((void *)baton->repo); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->submoduleReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - */ -Handle GitSubmodule::Reload(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - ReloadBaton* baton = new ReloadBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->submoduleReference = Persistent::New(args.This()); - baton->submodule = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, ReloadWork, (uv_after_work_cb)ReloadAfterWork); - - return Undefined(); -} - 
-void GitSubmodule::ReloadWork(uv_work_t *req) { - ReloadBaton *baton = static_cast(req->data); - int result = git_submodule_reload( - baton->submodule - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitSubmodule::ReloadAfterWork(uv_work_t *req) { - HandleScope scope; - ReloadBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->submoduleReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Number} status - */ -Handle GitSubmodule::Status(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsInt32()) { - return ThrowException(Exception::Error(String::New("Number status is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - StatusBaton* baton = new StatusBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->statusReference = Persistent::New(args[0]); - unsigned int * from_status; - from_status = (unsigned int *) args[0]->ToInt32()->Value(); - baton->status = from_status; - baton->submoduleReference = Persistent::New(args.This()); - 
baton->submodule = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, StatusWork, (uv_after_work_cb)StatusAfterWork); - - return Undefined(); -} - -void GitSubmodule::StatusWork(uv_work_t *req) { - StatusBaton *baton = static_cast(req->data); - int result = git_submodule_status( - baton->status, - baton->submodule - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitSubmodule::StatusAfterWork(uv_work_t *req) { - HandleScope scope; - StatusBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle result = Local::New(Undefined()); - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->statusReference.Dispose(); - baton->submoduleReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -Persistent GitSubmodule::constructor_template; diff --git a/src/tag.cc b/src/tag.cc deleted file mode 100644 index 08b099d41..000000000 --- a/src/tag.cc +++ /dev/null @@ -1,304 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/tag.h" -#include "../include/oid.h" -#include "../include/repo.h" -#include "../include/object.h" -#include "../include/signature.h" - -using namespace v8; -using namespace node; - -GitTag::GitTag(git_tag *raw) { - this->raw = raw; -} - -GitTag::~GitTag() { - git_tag_free(this->raw); -} - -void GitTag::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Tag")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "oid", Oid); - NODE_SET_PROTOTYPE_METHOD(tpl, "getTarget", GetTarget); - NODE_SET_PROTOTYPE_METHOD(tpl, "targetId", TargetId); - NODE_SET_PROTOTYPE_METHOD(tpl, "targetType", TargetType); - NODE_SET_PROTOTYPE_METHOD(tpl, "name", Name); - NODE_SET_PROTOTYPE_METHOD(tpl, "tagger", Tagger); - NODE_SET_PROTOTYPE_METHOD(tpl, "message", Message); - NODE_SET_PROTOTYPE_METHOD(tpl, "peel", Peel); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Tag"), constructor_template); -} - -Handle GitTag::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_tag is required."))); - } - - GitTag* object = new GitTag((git_tag *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitTag::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitTag::constructor_template->NewInstance(1, argv)); -} - -git_tag *GitTag::GetValue() { - return this->raw; -} - - -/** - * @return {Oid} result - */ -Handle GitTag::Oid(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_tag_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != 
NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {Object} callback - */ -Handle GitTag::GetTarget(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetTargetBaton* baton = new GetTargetBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->tagReference = Persistent::New(args.This()); - baton->tag = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[0])); - - uv_queue_work(uv_default_loop(), &baton->request, GetTargetWork, (uv_after_work_cb)GetTargetAfterWork); - - return Undefined(); -} - -void GitTag::GetTargetWork(uv_work_t *req) { - GetTargetBaton *baton = static_cast(req->data); - int result = git_tag_target( - &baton->target_out, - baton->tag - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitTag::GetTargetAfterWork(uv_work_t *req) { - HandleScope scope; - GetTargetBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->target_out != NULL) { - to = GitObject::New((void *)baton->target_out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - 
baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->tagReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -/** - * @return {Oid} result - */ -Handle GitTag::TargetId(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_tag_target_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitTag::TargetType(const Arguments& args) { - HandleScope scope; - - - git_otype result = git_tag_target_type( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Int32::New(result); - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitTag::Name(const Arguments& args) { - HandleScope scope; - - - const char * result = git_tag_name( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {Signature} result - */ -Handle GitTag::Tagger(const Arguments& args) { - HandleScope scope; - - - const git_signature * result = git_tag_tagger( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_signature * )git_signature_dup(result); - } - if (result != NULL) { - to = GitSignature::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {String} result - */ -Handle GitTag::Message(const Arguments& args) { - HandleScope scope; - - - const char * result = git_tag_message( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @param {Tag} tag - * @return {Object} tag_target_out - */ 
-Handle GitTag::Peel(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Tag tag is required."))); - } - - git_object * tag_target_out = 0; - const git_tag * from_tag; - from_tag = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - int result = git_tag_peel( - &tag_target_out - , from_tag - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (tag_target_out != NULL) { - to = GitObject::New((void *)tag_target_out); - } else { - to = Null(); - } - return scope.Close(to); -} - -Persistent GitTag::constructor_template; diff --git a/src/threads.cc b/src/threads.cc deleted file mode 100755 index 020600206..000000000 --- a/src/threads.cc +++ /dev/null @@ -1,59 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/threads.h" - -using namespace v8; -using namespace node; - -void GitThreads::Initialize(Handle target) { - HandleScope scope; - - Persistent object = Persistent::New(Object::New()); - - object->Set(String::NewSymbol("init"), FunctionTemplate::New(Init)->GetFunction()); - object->Set(String::NewSymbol("shutdown"), FunctionTemplate::New(Shutdown)->GetFunction()); - - target->Set(String::NewSymbol("Threads"), object); -} - - -/** - */ -Handle GitThreads::Init(const Arguments& args) { - HandleScope scope; - - - int result = git_threads_init( - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -/** - */ -Handle GitThreads::Shutdown(const Arguments& args) { - HandleScope scope; - - - git_threads_shutdown( - ); - - return Undefined(); -} - diff --git a/src/time.cc b/src/time.cc deleted file mode 100644 index b03a49c84..000000000 --- a/src/time.cc +++ /dev/null @@ -1,87 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/time.h" - -using namespace v8; -using namespace node; - -GitTime::GitTime(git_time *raw) { - this->raw = raw; -} - -GitTime::~GitTime() { - free(this->raw); -} - -void GitTime::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Time")); - - - NODE_SET_PROTOTYPE_METHOD(tpl, "time", Time); - NODE_SET_PROTOTYPE_METHOD(tpl, "offset", Offset); - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Time"), constructor_template); -} - -Handle GitTime::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_time is required."))); - } - - GitTime* object = new GitTime((git_time *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitTime::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitTime::constructor_template->NewInstance(1, argv)); -} - -git_time *GitTime::GetValue() { - return this->raw; -} - - -Handle GitTime::Time(const Arguments& args) { - HandleScope scope; - Handle to; - - git_time_t time = - ObjectWrap::Unwrap(args.This())->GetValue()->time; - - to = Integer::New(time); - return scope.Close(to); -} - -Handle GitTime::Offset(const Arguments& args) { - HandleScope scope; - Handle to; - - int offset = - ObjectWrap::Unwrap(args.This())->GetValue()->offset; - - to = Int32::New(offset); - return scope.Close(to); -} - -Persistent GitTime::constructor_template; diff --git a/src/tree.cc b/src/tree.cc deleted file mode 100755 index 8a2661ce1..000000000 --- a/src/tree.cc +++ /dev/null @@ -1,644 +0,0 @@ -/** - * This code is auto-generated; unless you 
know what you're doing, do not modify! - **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/tree.h" -#include "../include/repo.h" -#include "../include/oid.h" -#include "../include/tree_entry.h" -#include "../include/diff_list.h" -#include "../include/diff_options.h" -#include "../include/tree_builder.h" -#include "../include/index.h" - -using namespace v8; -using namespace node; - -GitTree::GitTree(git_tree *raw) { - this->raw = raw; -} - -GitTree::~GitTree() { - git_tree_free(this->raw); -} - -void GitTree::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("Tree")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "oid", Oid); - NODE_SET_PROTOTYPE_METHOD(tpl, "size", Size); - NODE_SET_PROTOTYPE_METHOD(tpl, "entryByName", EntryByName); - NODE_SET_PROTOTYPE_METHOD(tpl, "entryByIndex", EntryByIndex); - NODE_SET_PROTOTYPE_METHOD(tpl, "entryByOid", EntryByOid); - NODE_SET_PROTOTYPE_METHOD(tpl, "getEntry", GetEntry); - NODE_SET_PROTOTYPE_METHOD(tpl, "builder", Builder); - NODE_SET_PROTOTYPE_METHOD(tpl, "diffTree", DiffTree); - NODE_SET_PROTOTYPE_METHOD(tpl, "diffIndex", DiffIndex); - NODE_SET_PROTOTYPE_METHOD(tpl, "diffWorkDir", DiffWorkDir); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("Tree"), constructor_template); -} - -Handle GitTree::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_tree is required."))); - } - - GitTree* object = new GitTree((git_tree *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitTree::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return 
scope.Close(GitTree::constructor_template->NewInstance(1, argv)); -} - -git_tree *GitTree::GetValue() { - return this->raw; -} - - -/** - * @return {Oid} result - */ -Handle GitTree::Oid(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_tree_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitTree::Size(const Arguments& args) { - HandleScope scope; - - - size_t result = git_tree_entrycount( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Uint32::New(result); - return scope.Close(to); -} - -/** - * @param {String} filename - * @return {TreeEntry} result - */ -Handle GitTree::EntryByName(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String filename is required."))); - } - - const char * from_filename; - String::Utf8Value filename(args[0]->ToString()); - from_filename = strdup(*filename); - - const git_tree_entry * result = git_tree_entry_byname( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_filename - ); - free((void *)from_filename); - - Handle to; - if (result != NULL) { - result = (const git_tree_entry * )git_tree_entry_dup(result); - } - if (result != NULL) { - to = GitTreeEntry::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {Number} idx - * @return {TreeEntry} result - */ -Handle GitTree::EntryByIndex(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsUint32()) { - return ThrowException(Exception::Error(String::New("Number idx is required."))); - } - - size_t from_idx; - from_idx = (size_t) args[0]->ToUint32()->Value(); - - const git_tree_entry * 
result = git_tree_entry_byindex( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_idx - ); - - Handle to; - if (result != NULL) { - result = (const git_tree_entry * )git_tree_entry_dup(result); - } - if (result != NULL) { - to = GitTreeEntry::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {Oid} oid - * @return {TreeEntry} result - */ -Handle GitTree::EntryByOid(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid oid is required."))); - } - - const git_oid * from_oid; - from_oid = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - const git_tree_entry * result = git_tree_entry_byoid( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_oid - ); - - Handle to; - if (result != NULL) { - result = (const git_tree_entry * )git_tree_entry_dup(result); - } - if (result != NULL) { - to = GitTreeEntry::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {String} path - * @param {TreeEntry} callback - */ -Handle GitTree::GetEntry(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String path is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetEntryBaton* baton = new GetEntryBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->rootReference = Persistent::New(args.This()); - baton->root = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->pathReference = Persistent::New(args[0]); - const char * from_path; - String::Utf8Value path(args[0]->ToString()); - from_path = strdup(*path); - baton->path = from_path; - baton->callback = 
Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, GetEntryWork, (uv_after_work_cb)GetEntryAfterWork); - - return Undefined(); -} - -void GitTree::GetEntryWork(uv_work_t *req) { - GetEntryBaton *baton = static_cast(req->data); - int result = git_tree_entry_bypath( - &baton->out, - baton->root, - baton->path - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitTree::GetEntryAfterWork(uv_work_t *req) { - HandleScope scope; - GetEntryBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->out != NULL) { - to = GitTreeEntry::New((void *)baton->out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->rootReference.Dispose(); - baton->pathReference.Dispose(); - baton->callback.Dispose(); - free((void *)baton->path); - delete baton; -} - -/** - * @return {TreeBuilder} out - */ -Handle GitTree::Builder(const Arguments& args) { - HandleScope scope; - - git_treebuilder * out = 0; - - int result = git_treebuilder_create( - &out - , ObjectWrap::Unwrap(args.This())->GetValue() - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; 
- if (out != NULL) { - to = GitTreeBuilder::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {Repository} repo - * @param {Tree} new_tree - * @param {DiffOptions} opts - * @param {DiffList} callback - */ -Handle GitTree::DiffTree(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Tree new_tree is required."))); - } - - if (args.Length() == 3 || !args[3]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - DiffTreeBaton* baton = new DiffTreeBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args[0]); - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->repo = from_repo; - baton->old_treeReference = Persistent::New(args.This()); - baton->old_tree = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->new_treeReference = Persistent::New(args[1]); - git_tree * from_new_tree; - from_new_tree = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - baton->new_tree = from_new_tree; - baton->optsReference = Persistent::New(args[2]); - const git_diff_options * from_opts; - if (args[2]->IsObject()) { - from_opts = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - } else { - from_opts = 0; - } - baton->opts = from_opts; - baton->callback = Persistent::New(Local::Cast(args[3])); - - uv_queue_work(uv_default_loop(), &baton->request, DiffTreeWork, (uv_after_work_cb)DiffTreeAfterWork); - - return Undefined(); -} - -void GitTree::DiffTreeWork(uv_work_t *req) { - DiffTreeBaton *baton = static_cast(req->data); - int result = 
git_diff_tree_to_tree( - &baton->diff, - baton->repo, - baton->old_tree, - baton->new_tree, - baton->opts - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitTree::DiffTreeAfterWork(uv_work_t *req) { - HandleScope scope; - DiffTreeBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->diff != NULL) { - to = GitDiffList::New((void *)baton->diff); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->old_treeReference.Dispose(); - baton->new_treeReference.Dispose(); - baton->optsReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Repository} repo - * @param {Index} index - * @param {DiffOptions} opts - * @param {DiffList} callback - */ -Handle GitTree::DiffIndex(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - - if (args.Length() == 3 || !args[3]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - DiffIndexBaton* baton = new DiffIndexBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = 
baton; - baton->repoReference = Persistent::New(args[0]); - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->repo = from_repo; - baton->old_treeReference = Persistent::New(args.This()); - baton->old_tree = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->indexReference = Persistent::New(args[1]); - git_index * from_index; - if (args[1]->IsObject()) { - from_index = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - } else { - from_index = 0; - } - baton->index = from_index; - baton->optsReference = Persistent::New(args[2]); - const git_diff_options * from_opts; - if (args[2]->IsObject()) { - from_opts = ObjectWrap::Unwrap(args[2]->ToObject())->GetValue(); - } else { - from_opts = 0; - } - baton->opts = from_opts; - baton->callback = Persistent::New(Local::Cast(args[3])); - - uv_queue_work(uv_default_loop(), &baton->request, DiffIndexWork, (uv_after_work_cb)DiffIndexAfterWork); - - return Undefined(); -} - -void GitTree::DiffIndexWork(uv_work_t *req) { - DiffIndexBaton *baton = static_cast(req->data); - int result = git_diff_tree_to_index( - &baton->diff, - baton->repo, - baton->old_tree, - baton->index, - baton->opts - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitTree::DiffIndexAfterWork(uv_work_t *req) { - HandleScope scope; - DiffIndexBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->diff != NULL) { - to = GitDiffList::New((void *)baton->diff); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) 
- free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->old_treeReference.Dispose(); - baton->indexReference.Dispose(); - baton->optsReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -#include "../include/functions/copy.h" - -/** - * @param {Repository} repo - * @param {DiffOptions} opts - * @param {DiffList} callback - */ -Handle GitTree::DiffWorkDir(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - - if (args.Length() == 2 || !args[2]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - DiffWorkDirBaton* baton = new DiffWorkDirBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args[0]); - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->repo = from_repo; - baton->old_treeReference = Persistent::New(args.This()); - baton->old_tree = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->optsReference = Persistent::New(args[1]); - const git_diff_options * from_opts; - if (args[1]->IsObject()) { - from_opts = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - } else { - from_opts = 0; - } - baton->opts = from_opts; - baton->callback = Persistent::New(Local::Cast(args[2])); - - uv_queue_work(uv_default_loop(), &baton->request, DiffWorkDirWork, (uv_after_work_cb)DiffWorkDirAfterWork); - - return Undefined(); -} - -void GitTree::DiffWorkDirWork(uv_work_t *req) { - DiffWorkDirBaton *baton = static_cast(req->data); - int result = git_diff_tree_to_workdir( - &baton->diff, - baton->repo, - 
baton->old_tree, - baton->opts - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitTree::DiffWorkDirAfterWork(uv_work_t *req) { - HandleScope scope; - DiffWorkDirBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->diff != NULL) { - to = GitDiffList::New((void *)baton->diff); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->old_treeReference.Dispose(); - baton->optsReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -Persistent GitTree::constructor_template; diff --git a/src/tree_builder.cc b/src/tree_builder.cc deleted file mode 100644 index 3a6b6585f..000000000 --- a/src/tree_builder.cc +++ /dev/null @@ -1,353 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/tree_builder.h" -#include "../include/repo.h" -#include "../include/oid.h" -#include "../include/tree_entry.h" -#include "../include/tree.h" -#include "../include/diff_list.h" -#include "../include/diff_options.h" -#include "../include/index.h" - -using namespace v8; -using namespace node; - -GitTreeBuilder::GitTreeBuilder(git_treebuilder *raw) { - this->raw = raw; -} - -GitTreeBuilder::~GitTreeBuilder() { - git_treebuilder_free(this->raw); -} - -void GitTreeBuilder::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("TreeBuilder")); - - NODE_SET_METHOD(tpl, "create", Create); - NODE_SET_PROTOTYPE_METHOD(tpl, "clear", Clear); - NODE_SET_METHOD(tpl, "size", Size); - NODE_SET_PROTOTYPE_METHOD(tpl, "get", Get); - NODE_SET_PROTOTYPE_METHOD(tpl, "insert", Insert); - NODE_SET_PROTOTYPE_METHOD(tpl, "gitTreebuilderRemove", GitTreebuilderRemove); - NODE_SET_PROTOTYPE_METHOD(tpl, "write", Write); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("TreeBuilder"), constructor_template); -} - -Handle GitTreeBuilder::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_treebuilder is required."))); - } - - GitTreeBuilder* object = new GitTreeBuilder((git_treebuilder *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitTreeBuilder::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitTreeBuilder::constructor_template->NewInstance(1, argv)); -} - -git_treebuilder *GitTreeBuilder::GetValue() { - return this->raw; -} - - -/** - * @param {Tree} source - * 
@return {TreeBuilder} out - */ -Handle GitTreeBuilder::Create(const Arguments& args) { - HandleScope scope; - - git_treebuilder * out = 0; - const git_tree * from_source; - if (args[0]->IsObject()) { - from_source = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - } else { - from_source = 0; - } - - int result = git_treebuilder_create( - &out - , from_source - ); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - to = GitTreeBuilder::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {TreeBuilder} bld - */ -Handle GitTreeBuilder::Clear(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("TreeBuilder bld is required."))); - } - - git_treebuilder * from_bld; - from_bld = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - - git_treebuilder_clear( - from_bld - ); - - return Undefined(); -} - -/** - * @return {Number} result - */ -Handle GitTreeBuilder::Size(const Arguments& args) { - HandleScope scope; - - - unsigned int result = git_treebuilder_entrycount( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Uint32::New(result); - return scope.Close(to); -} - -/** - * @param {String} filename - * @return {TreeEntry} result - */ -Handle GitTreeBuilder::Get(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String filename is required."))); - } - - const char * from_filename; - String::Utf8Value filename(args[0]->ToString()); - from_filename = strdup(*filename); - - const git_tree_entry * result = git_treebuilder_get( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_filename - ); - free((void 
*)from_filename); - - Handle to; - if (result != NULL) { - result = (const git_tree_entry * )git_tree_entry_dup(result); - } - if (result != NULL) { - to = GitTreeEntry::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {String} filename - * @param {Oid} id - * @param {Number} filemode - * @return {TreeEntry} out - */ -Handle GitTreeBuilder::Insert(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String filename is required."))); - } - if (args.Length() == 1 || !args[1]->IsObject()) { - return ThrowException(Exception::Error(String::New("Oid id is required."))); - } - if (args.Length() == 2 || !args[2]->IsNumber()) { - return ThrowException(Exception::Error(String::New("Number filemode is required."))); - } - - const git_tree_entry * out = 0; - const char * from_filename; - String::Utf8Value filename(args[0]->ToString()); - from_filename = strdup(*filename); - const git_oid * from_id; - from_id = ObjectWrap::Unwrap(args[1]->ToObject())->GetValue(); - git_filemode_t from_filemode; - from_filemode = (git_filemode_t) (int) args[2]->ToNumber()->Value(); - - int result = git_treebuilder_insert( - &out - , ObjectWrap::Unwrap(args.This())->GetValue() - , from_filename - , from_id - , from_filemode - ); - free((void *)from_filename); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - Handle to; - if (out != NULL) { - out = (const git_tree_entry * )git_tree_entry_dup(out); - } - if (out != NULL) { - to = GitTreeEntry::New((void *)out); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @param {String} filename - */ -Handle GitTreeBuilder::GitTreebuilderRemove(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || 
!args[0]->IsString()) { - return ThrowException(Exception::Error(String::New("String filename is required."))); - } - - const char * from_filename; - String::Utf8Value filename(args[0]->ToString()); - from_filename = strdup(*filename); - - int result = git_treebuilder_remove( - ObjectWrap::Unwrap(args.This())->GetValue() - , from_filename - ); - free((void *)from_filename); - if (result != GIT_OK) { - if (giterr_last()) { - return ThrowException(Exception::Error(String::New(giterr_last()->message))); - } else { - return ThrowException(Exception::Error(String::New("Unkown Error"))); - } - } - - return Undefined(); -} - -#include "../include/functions/copy.h" - -/** - * @param {Repository} repo - * @param {Oid} callback - */ -Handle GitTreeBuilder::Write(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - WriteBaton* baton = new WriteBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->id = (git_oid *)malloc(sizeof(git_oid )); - baton->repoReference = Persistent::New(args[0]); - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->repo = from_repo; - baton->bldReference = Persistent::New(args.This()); - baton->bld = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), &baton->request, WriteWork, (uv_after_work_cb)WriteAfterWork); - - return Undefined(); -} - -void GitTreeBuilder::WriteWork(uv_work_t *req) { - WriteBaton *baton = static_cast(req->data); - int result = git_treebuilder_write( - baton->id, - baton->repo, - baton->bld - ); - baton->error_code = result; - if (result != 
GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitTreeBuilder::WriteAfterWork(uv_work_t *req) { - HandleScope scope; - WriteBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->id != NULL) { - to = GitOid::New((void *)baton->id); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - free(baton->id); - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->bldReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -Persistent GitTreeBuilder::constructor_template; diff --git a/src/tree_entry.cc b/src/tree_entry.cc deleted file mode 100755 index 2d761e611..000000000 --- a/src/tree_entry.cc +++ /dev/null @@ -1,229 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include - -#include "git2.h" - -#include "../include/functions/copy.h" - -#include "../include/tree_entry.h" -#include "../include/oid.h" -#include "../include/repo.h" -#include "../include/object.h" - -using namespace v8; -using namespace node; - -GitTreeEntry::GitTreeEntry(git_tree_entry *raw) { - this->raw = raw; -} - -GitTreeEntry::~GitTreeEntry() { - git_tree_entry_free(this->raw); -} - -void GitTreeEntry::Initialize(Handle target) { - HandleScope scope; - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(String::NewSymbol("TreeEntry")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "name", Name); - NODE_SET_PROTOTYPE_METHOD(tpl, "oid", Oid); - NODE_SET_PROTOTYPE_METHOD(tpl, "type", Type); - NODE_SET_PROTOTYPE_METHOD(tpl, "filemode", filemode); - NODE_SET_PROTOTYPE_METHOD(tpl, "getObject", GetObject); - - - constructor_template = Persistent::New(tpl->GetFunction()); - target->Set(String::NewSymbol("TreeEntry"), constructor_template); -} - -Handle GitTreeEntry::New(const Arguments& args) { - HandleScope scope; - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return ThrowException(Exception::Error(String::New("git_tree_entry is required."))); - } - - GitTreeEntry* object = new GitTreeEntry((git_tree_entry *) External::Unwrap(args[0])); - object->Wrap(args.This()); - - return scope.Close(args.This()); -} - -Handle GitTreeEntry::New(void *raw) { - HandleScope scope; - Handle argv[1] = { External::New((void *)raw) }; - return scope.Close(GitTreeEntry::constructor_template->NewInstance(1, argv)); -} - -git_tree_entry *GitTreeEntry::GetValue() { - return this->raw; -} - - -/** - * @return {String} result - */ -Handle GitTreeEntry::Name(const Arguments& args) { - HandleScope scope; - - - const char * result = git_tree_entry_name( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = String::New(result); - return scope.Close(to); -} - -/** - * @return {Oid} 
result - */ -Handle GitTreeEntry::Oid(const Arguments& args) { - HandleScope scope; - - - const git_oid * result = git_tree_entry_id( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - if (result != NULL) { - result = (const git_oid * )git_oid_dup(result); - } - if (result != NULL) { - to = GitOid::New((void *)result); - } else { - to = Null(); - } - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitTreeEntry::Type(const Arguments& args) { - HandleScope scope; - - - git_otype result = git_tree_entry_type( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Number::New(result); - return scope.Close(to); -} - -/** - * @return {Number} result - */ -Handle GitTreeEntry::filemode(const Arguments& args) { - HandleScope scope; - - - git_filemode_t result = git_tree_entry_filemode( - ObjectWrap::Unwrap(args.This())->GetValue() - ); - - Handle to; - to = Number::New(result); - return scope.Close(to); -} - -#include "../include/functions/copy.h" - -/** - * @param {Repository} repo - * @param {Object} callback - */ -Handle GitTreeEntry::GetObject(const Arguments& args) { - HandleScope scope; - if (args.Length() == 0 || !args[0]->IsObject()) { - return ThrowException(Exception::Error(String::New("Repository repo is required."))); - } - - if (args.Length() == 1 || !args[1]->IsFunction()) { - return ThrowException(Exception::Error(String::New("Callback is required and must be a Function."))); - } - - GetObjectBaton* baton = new GetObjectBaton; - baton->error_code = GIT_OK; - baton->error = NULL; - baton->request.data = baton; - baton->repoReference = Persistent::New(args[0]); - git_repository * from_repo; - from_repo = ObjectWrap::Unwrap(args[0]->ToObject())->GetValue(); - baton->repo = from_repo; - baton->entryReference = Persistent::New(args.This()); - baton->entry = ObjectWrap::Unwrap(args.This())->GetValue(); - baton->callback = Persistent::New(Local::Cast(args[1])); - - uv_queue_work(uv_default_loop(), 
&baton->request, GetObjectWork, (uv_after_work_cb)GetObjectAfterWork); - - return Undefined(); -} - -void GitTreeEntry::GetObjectWork(uv_work_t *req) { - GetObjectBaton *baton = static_cast(req->data); - int result = git_tree_entry_to_object( - &baton->object_out, - baton->repo, - baton->entry - ); - baton->error_code = result; - if (result != GIT_OK && giterr_last() != NULL) { - baton->error = git_error_dup(giterr_last()); - } -} - -void GitTreeEntry::GetObjectAfterWork(uv_work_t *req) { - HandleScope scope; - GetObjectBaton *baton = static_cast(req->data); - - TryCatch try_catch; - if (baton->error_code == GIT_OK) { - Handle to; - if (baton->object_out != NULL) { - to = GitObject::New((void *)baton->object_out); - } else { - to = Null(); - } - Handle result = to; - Handle argv[2] = { - Local::New(Null()), - result - }; - baton->callback->Call(Context::GetCurrent()->Global(), 2, argv); - } else { - if (baton->error) { - Handle argv[1] = { - Exception::Error(String::New(baton->error->message)) - }; - baton->callback->Call(Context::GetCurrent()->Global(), 1, argv); - if (baton->error->message) - free((void *)baton->error->message); - free((void *)baton->error); - } else { - baton->callback->Call(Context::GetCurrent()->Global(), 0, NULL); - } - } - - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - baton->repoReference.Dispose(); - baton->entryReference.Dispose(); - baton->callback.Dispose(); - delete baton; -} - -Persistent GitTreeEntry::constructor_template; diff --git a/src/wrapper.cc b/src/wrapper.cc deleted file mode 100644 index f2d2463be..000000000 --- a/src/wrapper.cc +++ /dev/null @@ -1,80 +0,0 @@ -/** - * This code is auto-generated; unless you know what you're doing, do not modify! 
- **/ -#include -#include -#include -#include - -#include "../include/wrapper.h" -#include "node_buffer.h" - -using namespace v8; -using namespace node; - -Wrapper::Wrapper(void *raw) { - this->raw = raw; -} - -void Wrapper::Initialize(Handle target) { - NanScope(); - - Local tpl = FunctionTemplate::New(New); - - tpl->InstanceTemplate()->SetInternalFieldCount(1); - tpl->SetClassName(NanSymbol("Wrapper")); - - NODE_SET_PROTOTYPE_METHOD(tpl, "toBuffer", ToBuffer); - - NanAssignPersistent(FunctionTemplate, constructor_template, tpl); - target->Set(String::NewSymbol("Wrapper"), tpl->GetFunction()); -} - -NAN_METHOD(Wrapper::New) { - NanScope(); - - if (args.Length() == 0 || !args[0]->IsExternal()) { - return NanThrowError(String::New("void * is required.")); - } - - Wrapper* object = new Wrapper(External::Cast(*args[0])->Value()); - object->Wrap(args.This()); - - NanReturnValue(args.This()); -} - -Handle Wrapper::New(void *raw) { - NanScope(); - Handle argv[1] = { External::New((void *)raw) }; - Local instance; - Local constructorHandle = NanPersistentToLocal(constructor_template); - instance = constructorHandle->GetFunction()->NewInstance(1, argv); - return scope.Close(instance); -} - -void *Wrapper::GetValue() { - return this->raw; -} - -NAN_METHOD(Wrapper::ToBuffer) { - NanScope(); - - if(args.Length() == 0 || !args[0]->IsNumber()) { - return NanThrowError(String::New("Number is required.")); - } - - int len = args[0]->ToNumber()->Value(); - - Local bufferConstructor = Local::Cast( - Context::GetCurrent()->Global()->Get(String::New("Buffer"))); - - Handle constructorArgs[1] = { Integer::New(len) }; - Local nodeBuffer = bufferConstructor->NewInstance(1, constructorArgs); - - std::memcpy(node::Buffer::Data(nodeBuffer), ObjectWrap::Unwrap(args.This())->GetValue(), len); - - NanReturnValue(nodeBuffer); -} - - -Persistent Wrapper::constructor_template; diff --git a/test/blob.js b/test/blob.js deleted file mode 100644 index c3e9363c7..000000000 --- a/test/blob.js +++ 
/dev/null @@ -1,13 +0,0 @@ -var git = require('../'), - path = require('path'); - -exports.content = function(test) { - var testOid = git.Oid.fromString('111dd657329797f6165f52f5085f61ac976dcf04'); - test.expect(1); - git.Repo.open(path.resolve('repos/workdir/.git'), function(err, repo) { - repo.getBlob(testOid, function(err, blob) { - test.equals(blob.toString().slice(0, 7), "@import"); - test.done(); - }); - }); -}; diff --git a/test/commit.js b/test/commit.js deleted file mode 100644 index ef0b51e8e..000000000 --- a/test/commit.js +++ /dev/null @@ -1,242 +0,0 @@ -var git = require('../'), - rimraf = require('rimraf'), - fs = require( 'fs' ); - -var historyCountKnownSHA = 'fce88902e66c72b5b93e75bdb5ae717038b221f6'; - -exports.message = function(test) { - test.expect(2); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - var message = commit.message(); - test.equals(error, null, 'There should be no error'); - test.equals(message, 'Update README.md', 'Message should match expected value'); - test.done(); - }); - }); -}; - -exports.sha = function(test) { - test.expect(2); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - var sha = commit.sha(); - test.equals(error, null, 'There should be no error'); - test.equals(sha, historyCountKnownSHA, 'SHA should match expected value'); - test.done(); - }); - }); -}; - -exports.time = function(test) { - test.expect(2); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - var time = commit.timeMs(); - test.equals(error, null, 'There should be no error'); - test.equals(time, 1362012884000, 'Time should match expected value'); - test.done(); - }); - }); -}; - -exports.date = function(test) { - test.expect(2); - git.Repo.open('repos/workdir/.git', function(error, 
repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - var date = commit.date(); - test.equals(error, null, 'There should be no error'); - test.equals(date.getTime(), 1362012884000, 'Date should match expected value'); - test.done(); - }); - }); -}; - -exports.offset = function(test) { - test.expect(2); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - var offset = commit.offset(); - test.equals(error, null, 'There should be no error'); - test.equals(offset, 780, 'Offset should match expected value'); - test.done(); - }); - }); -}; - -exports.author = function(test) { - test.expect(2); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - var author = commit.author(); - test.equals(error, null, 'There should be no error'); - test.notEqual(author, null, 'Author should not be null'); - test.done(); - }); - }); -}; - -exports.authorName = function(test) { - test.expect(1); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - var author = commit.author(); - var name = author.name(); - test.equals(name, 'Michael Robinson', 'The author name should match expected value'); - test.done(); - }); - }); -}; - -exports.authorEmail = function(test) { - test.expect(1); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - var author = commit.author(); - var email = author.email(); - test.equals(email, 'mike@panmedia.co.nz', 'The author email should match expected value'); - test.done(); - }); - }); -}; - -exports.committerName = function(test) { - test.expect(1); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - 
var committer = commit.committer(); - var name = committer.name(); - test.equals(name, 'Michael Robinson', 'The author name should match expected value'); - test.done(); - }); - }); -}; - -exports.committerEmail = function(test) { - test.expect(1); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - var committer = commit.committer(); - var email = committer.email(); - test.equals(email, 'mike@panmedia.co.nz', 'The committer email should match expected value'); - test.done(); - }); - }); -}; - -/** - * Test that improper commit ID's result in an error message - */ -exports.improperCommitId = function(test) { - test.expect(1); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit('not a proper commit sha', function(error, commit) { - test.notEqual(error, null, 'Error should occur'); - test.done(); - }); - }); -}; - -/** - * Test that retreiving walking a given commit's history works as expected. - */ -exports.history = function(test) { - test.expect(4); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - test.equals(null, error, 'Getting latest branch commit should not error'); - var historyCount = 0; - var expectedHistoryCount = 364; - commit.history().on('commit', function(commit) { - historyCount++; - }).on('end', function(commits) { - test.equals(null, error, 'There should be no errors'); - test.equals(historyCount, expectedHistoryCount); - test.equals(commits.length, expectedHistoryCount); - test.done(); - }).on('error', function(error) { - test.equals(null, error, 'There should be no errors'); - test.ok(false, 'There should be no errors'); - }).start(); - }); - }); -}; - -/** - * Test that retreiving master branch's HEAD commit works as expected. 
- */ -exports.masterHead = function(test) { - test.expect(1); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getBranch('master', function(error, branch) { - var sha = branch.sha(); - repository.getCommit(sha, function(error, commit) { - test.equals(error, null, 'Getting latest branch commit should not error'); - test.done(); - }); - }); - }); -}; - -/** - * Test that retreiving parent works as expected. - * - * @param {Object} test - */ -exports.parents = function(test) { - test.expect(3); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - commit.getParents(function(error, parents) { - test.equals(parents.length, 1, 'Commit should have exactly one parent'); - var sha = parents[0].sha(); - test.equals(error, null, 'Getting parent SHA should not error'); - test.equals(sha, 'ecfd36c80a3e9081f200dfda2391acadb56dac27', 'Parent SHA should match expected value'); - test.done(); - }); - }); - }); -}; - -/** - * Test that retrieving and walking a commit's tree works as expected. - */ -exports.tree = function(test) { - test.expect(2); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - test.equals(error, null, 'Getting latest branch commit should not error'); - - var commitTreeEntryCount = 0; - var expectedCommitTreeEntryCount = 198; - commit.getTree(function(error, tree) { - tree.walk().on('entry', function(entry) { - commitTreeEntryCount++; - }).on('end', function(error, entries) { - test.equals(commitTreeEntryCount, expectedCommitTreeEntryCount, 'Commit tree entry count does not match expected'); - test.done(); - }).start(); - }); - }); - }); -}; - -/** - * Test that getDiff works as expected. 
- */ -exports.getDiff = function(test) { - test.expect(1); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - commit.getDiff(function(error, diff) { - test.equals(diff.length, 1, 'Should be one item in parents diff trees'); - test.done(); - }); - }); - }); -}; - -process.on('uncaughtException', function(err) { - console.log(err.stack); -}); - diff --git a/test/difflist.js b/test/difflist.js deleted file mode 100644 index 3c9e0f919..000000000 --- a/test/difflist.js +++ /dev/null @@ -1,49 +0,0 @@ -var git = require('../'), - rimraf = require('rimraf'), - fs = require( 'fs' ); - -var historyCountKnownSHA = 'fce88902e66c72b5b93e75bdb5ae717038b221f6'; - -/** - * Test that retreiving parent works as expected. - * - * @param {Object} test - */ -exports.walkingDiffs = function(test) { - test.expect(16); - git.Repo.open('repos/workdir/.git', function(error, repository) { - repository.getCommit(historyCountKnownSHA, function(error, commit) { - commit.getDiff(function(error, diffList) { - test.equal(null, error, 'Should not error'); - - diffList[0].patches().forEach(function(patch) { - test.equal(null, error, 'Should not error'); - - test.equal(patch.oldFile().path(), 'README.md', 'Old file path should match expected'); - test.equal(patch.newFile().path(), 'README.md', 'New file path should match expected'); - test.equal(patch.size(), 1, 'Content array should be of known length'); - test.ok(patch.isModified(), 'Status should be known type'); - - var hunk = patch.hunks()[0]; - test.equal(hunk.size(), 5, 'Content array should be of known length'); - var lines = hunk.lines(); - - test.equal(lines[0].lineOrigin, git.DiffList.LineOrigin.Context, 'First content item should be context'); - test.equal(lines[1].lineOrigin, git.DiffList.LineOrigin.Context, 'Second content item should be context'); - test.equal(lines[2].lineOrigin, git.DiffList.LineOrigin.Context, 'Third content item should be 
context'); - - var oldContent = '__Before submitting a pull request, please ensure both unit tests and lint checks pass.__\n'; - test.equal(lines[3].content, oldContent, 'Old content should match known value'); - test.equal(lines[3].lineOrigin, git.DiffList.LineOrigin.Deletion, 'Fourth content item should be deletion'); - test.equal(lines[3].length, 90, 'Fourth content length should match known value'); - - var newContent = '__Before submitting a pull request, please ensure both that you\'ve added unit tests to cover your shiny new code, and that all unit tests and lint checks pass.__\n'; - test.equal(lines[4].content, newContent, 'New content should match known value'); - test.equal(lines[4].lineOrigin, git.DiffList.LineOrigin.Addition, 'Fifth content item should be addition'); - test.equal(lines[4].length, 162, 'Fifth content length should match known value'); - test.done(); - }); - }); - }); - }); -}; diff --git a/test/encrypted_rsa b/test/encrypted_rsa new file mode 100644 index 000000000..2ecf534fa --- /dev/null +++ b/test/encrypted_rsa @@ -0,0 +1,54 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-128-CBC,76C5AF2E37D863AA3712FFD502FE63FF + +dp3B4Mz7//+AxUpMEBiJUmOGherBSM5LGov4Je7Od+LoORPCGu17+l44VovPnwgg +JrK5IcV/MiDOJ/iYXtyeMWtUWU8v66M3bmQn83VzcUv0SnuXi673tNtZe14u2UhM +adW78AqC15lM2za1iMLM7Y5MTcg8rAtXZIuOllxIRCt+sbsLWObMo9YHPjiRPPa/ +cDfIW+PXTTiIE0qw+7cACVRXXZsH04HqvrK8qH34MZcGqLjA3sj1fZt0wd2IgwGa +CK0lm8edMcLlZeM9vippKUEgNvjgN+8xCcPf9AoCEQepd4JVAIJYeeV+qxkUsp4s +SV2dUF8RWXdTG9QT7PCZq0o0KMcD0u7yhAquI21+ggti3lx1Ix9I5wMNCBqNEYVE +Q/hNt8PJwjaiD3x+rFHjjiVmA0onAIgdMXPLLkg43H3kBYanFc8mP+hLtegNlY5V +zfb3RilnMua0DHf0sqybvPNrvrUNchQPL0Py00PP4yf3nuKcIYgqgj+1wMVK1prS +4nMCyFNAlSt7KxSJOGU050BeBYcYlQRPXicoesNqfFmJ2hNv4jc1EM9khxSlcsU6 +zS3ZFv4kPJoKnn6CBOjsFwZ8EY7wwJvZHwDFZnrBXU8LcLG8+KCjvjHb0qXGFfG7 +g3VElz5IFhN6MrpGGrn/81LFGepg4FCa+55xCje1ykJrPdMwTnA4ezOfvcVXIuFm +iVk/2GiUUDOmn2FeSyNYSgQ/UFgxc5jUXMRcSDRKOM3y/Upl8+hzne91IHVVH1el 
+ChMR5HowQaRKmTgml6nG43D4jTcwcJdhcuDbXlbvBgVg0rTs4ZDVjP6RtCTmZete +nlHIYeRxutvGBYD4xjuSEldsSoQvVaDkLoKNZCHLzNFEutG4P4Ee8frGFD11j3gu +R+p7KlLLa+MyfW3Z3yxMc84iTw8ULzIQFRQX3Wk1NBW9giRpxEDZpXK3KYZErFZ6 +dK2zFAYpryYQxFx5+Qnl9okTAxi7NHhAVksKAQQBEgikbS8vJSJvDBhidpOJ1Kr0 +L896bjOFdzl8Xz0VturUmduSghiCrMPzCC5iVonJfqNLaXExYDWlcr+GpZzbuxbw +tP0KjvL4lM9MwgxCAXLh6SAwn79I3VWFTDMsFBLoCVpReyu7RU546DhuspZmY7+l +kuWhjE4P+zYGy9Rd8xnXREsDD43m2laX1JDjRHDAw5maUBWJXCXUDmSSL1Tyha9y +r6H99HjfMYc2RgPr+4yjYeVU1d1PgS94q9u8XiFDsDC3+WrxpQQdpraN6gRhdb2j +aoZ7BpqMxccaMwskRayoTCXOzd0p2Xf848soQtnRTE7/TowUHE0QGFn5v+zgUeFI +aGj61MB9q+M297/XTeqDowr4QDYGWA8HWGGkaAUhmntyoQRjIdN50dztDy0IeQrY +rL7scCBGeyxlFJiazhs5h9ppCchhhuxR2hViRGzXIdaII2MwKHh1UDw7+Jm14K46 +fSrFmhZJn2cGFL+fRQEwV1tBUG083DJJFtfP0XxDtcEkSZxMfE7GFDTL7izRGdCg +Mj3zOsUbhZxc8YSAqPIT/tYNuaESk/m+s14EF4ni8WU19T4tKNy0LFh5eFHfk9OO +Sz1CIaOWVRSnDcYiOGacpBV8LLrjaqq+awi9EATj9t/47OzOKbO3IVzVgjZKQBsl +mgo+vjZ3W0LH59XNVgE15x6NqG+gdlPRbX+1iJF44c8RWIm5RLtZ8RfQw63P1PgI +R9VRlfqhtna17nOYP/w03WfGDRUGR6MgSdwyy6pKo93hsvJRCpBzmAiXtcVxiVZ7 +ggGk6khhtWrEKZe8DYT1lWamJyng+dUhHGPxYPdoeBy9fZ2fQzoT+OVPCFCuUy2h +wBwRJjOCr7e7fwuyS+cAM/1uRfLDMq7xs7N1JGSVePAvGvICrEDHoMMe5oSMKpoi +EEjPxS+glKdLflbXatJ4V5+Zm/dVvSkb9b+k5RNV4rFZst1Yahl/7expic0rVF6h +nOieyRPzFeE9cGDtDkoFOicixJfLOI6Ex/6nfGsIJa2hcXFm+ibO54vWCgtgMaXm +AANrwxvvEWROvmkL8l/tbmWgj8Nd/Vt8L+2r/7tcZ6vWWqseTUNOYL9o2gV6lHEn +OkGY/WrPUfAIXml5dWTDWGGi8YuE2KhYXBn6msN8lcO7hGOIWetfnCGB7xsafjBl +4xaT18L4b+mNSX3lmCqfcXMY0tJZUZBgFw9gAdIl3xcKqyUSMHJHyMpm7dj0cP0P +Egs4Y4qDN5uvVI2zHobYfhez2uk+T43yXu/tYFVFL9I37/fshzPJoVb7oevNDNKH +siPljm/dhGB8jxbXoARJFoHgUeA+CLj9bD9JSh5Siu93KTjYTJZLK/XC0nVe/R4q +Sd7EaM2B92IU5LPGnO7BvcXHygjhwCqVyyGFiS5m6VirG6TsZEawPTVtWFme+T71 +FQevwtgNkmblN7ETvqrsXkaU992zGa7dPs8Dsvpmm9TS7wHNObFRPQshJ7i7lKG4 +vypXduG0vPjGwFqoz4UxGYFYpytfXT83+Iww4VGfKA0NbIkG3huh/Tl5mya39FUi +b2tMk2pE9Zjj0uLTlTR7YSPKDD/C67kPww9ppCW3CTJPn7Fp5cIMjGp6KtCEnfz4 +dMRMPOH7VIi57XsSWm2frcxmMkNIE+80JD9hW7zjANkrsHZ/l9QwTOCzlMNWKu82 
+RrXIj4peZocXBYxlqNyX7LaaK1lVgbjwX51eyaNKMiOcV483XEWAj6omkJi2jRte +eOMLQ0ZmIMklIG8dA4YTilmvFczN5TUEfGmKknQ3tVkjGyYVThjbKoEeSJODKDIN +md46fUM1sIYR/CtDQl3KK0KSUdYGvsSNOF4g/6D96d6GQk85nQvlUkxCjjUxNaur +TgF0+DM3O+UjSs2RGldrqtb4DP2AtqH1WulKdX94Y/LUb6icGI6jC6QwgdAjZpAC +s+os4irP6fLOho4IpNtqFqWzairzFhnyDWussh9A50mxPt1EFl1ygbjgD6KKdtx3 +ZaI+ZjKtqY/2d4qmhnKT68is5cv/vXy+Eow3uT5SvXeCOtS8ceWBpYKfO/pc3Bh2 +wNf1gVqdttD3npCqaXCKo9QdXYqt+hqeLK55p7D7CMG5c05tOCr06l3VUG7Wq5RK +-----END RSA PRIVATE KEY----- diff --git a/test/encrypted_rsa.pub b/test/encrypted_rsa.pub new file mode 100644 index 000000000..3ab84a7d0 --- /dev/null +++ b/test/encrypted_rsa.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC6x3pGhFSz+CT5bjeIWNvYNh4BBNFiGn+YzT09we58vwnyHWb940e6SJalrTqLH+6QWAXMcZpSZFBwnYia9Ja/XE2UWwxQU5rryGdc5Tju/u1kB0HuMjGpQDmFSpVrJcfRS5wXJCtoka5O0qvPUUNVAK9k/6HVX+2CGbafNjOMXnNtWgKKWlDd5eQAk3Ok3LvSrTv0kp9S3C4KdO8cz9NvtlWXHTFit94kOIzaL/v10f2u6V93VvV5jckJwZhjfyH1Q2WS/+j0ck7WqyGEBltPz6dubJipqrlAHRUacmWtI0ODdsoNwUrKCidiEkwheWA2SmbIdkTInt5vrcYvbOdNrn9aJ1aII15PSLE7eCqNdasEkp2G75hQ5DWYnprxVGY2FzqCCg2DwYev1qe4MzJ+m9PrUL+FKCrkhiCgpaOT+Noz5gX1gRvYtvg+BeRA2uoIi7rK1A3CapKRJmVM7kukgCZ5ZSMgXgXPtX4ttUgu43jVjEdcjrkoZqBSH8l72cmEcIyjahuJZqg859CyQbX5qAGDwxdz7Qjj0gwGU//bNd3/vZMzWEfkN49iDpYjipLZzg4MR946kFTT90X9S5ryjSXkyFZJWSyks8K8BOztL/o8avAu4RUn/4+ipISSz+5FoOi/pkH+KkvIoSzlcuGxAigDyUlev8wxn/FQj2tmGQ== your_email@example.com diff --git a/test/id_rsa b/test/id_rsa new file mode 100644 index 000000000..91ae66320 --- /dev/null +++ b/test/id_rsa @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEA5E3IF5x7qkdIG8HoJ6/Wcc+IU5I41f0qYCTTyc+5qPxyjW8d +K6o49T9MhkdOd0fRkhRYptrreDSvQzz1JEHcrnMPg6C5GERyZpSeATWvTUwg9LJf +nklztvevZGaIwmEjoG5pAicfJnYE8Ic3YBP44Qa7GNITxOqUU5VPh+vP83jXSVAC +CX9Cy1zpt4aUyiwNfVSApbZf4/VbwSaYER3QcpVxMi/B6JGKY5EEJNWV495uzZaP +Mg3YOFXJYziVgvl4fJMUTHyucf1UVbGCgkFzeUJcynqn+1EkQ1Ev+5haD1AVvneJ +MCrRsUbFriC9snqs4n2VEzoLIffmVgFsVn30GQIDAQABAoIBAQDPQm2sQbti0mN8 
+D4Uawl8D40v30n8WhUa7EbPTOmlqKAQ2sfDhex9KRbTLEmEBmImA/Eee8o9iCTIy +8Fv8Fm6pUHt9G6Pti/XvemwW3Q3QNpSUkHqN0FDkgecQVqVBEb6uHo3mDm4RFINX +eOmkp30BjIK9/blEw1D0sFALLOEUPaDdPMwiXtFgqfrFSgpDET3TvQIwZ2LxxTm0 +cNmP3sCSlZHJNkZI4hBEWaaXR+V5/+C1qblDCo5blAWTcX3UzqrwUUJgFi6VnBuh +7S9Q6+CEIU+4JRyWQNmY8YgZFaAp6IOr/kyfPxTP1+UEVVgcLn3WDYwfG9og0tmz +fzlruAgBAoGBAPfz73Pey86tNZEanhJhbX8gVjzy2hvyhT0paHg0q/H6c1VWOtUH +OwZ3Ns2xAZqJhlDqCHnQYSCZDly042U/theP4N8zo1APb4Yg4qdmXF9QE1+2M03r +kS6138gU/CSCLf8pCYa6pA/GmsaXxloeJGLvT4fzOZRsVav80/92XHRhAoGBAOu2 +mKh4Gr1EjgN9QNbk9cQTSFDtlBEqO/0pTepvL73UvNp/BAn4iYZFU4WnklFVBSWc +L84Sc732xU12TAbTTUsa6E7W29pS8u7zVTxlIdQIIU5pzDyU1pNNk2kpxzte5p3Y +PDtniPFsoYLWoH0LpsKL93t2pLAj+IOkE6f3XBq5AoGAIKaYo5N1FxQr952frx/x +QUpK0N/R5Ng8v18SiLG26rhmM5iVSrQXC7TrHI7wfR8a9tC6qP/NqnM9NuwC/bQ0 +EEo7/GhaWxKNRwZRkmWiSFLNGk9t1hbtGU+N1lUdFtmloPIQdRNiw0kN3JTj474Q +YI7O1EItFORnK6yxZfR6HEECgYEA1CT7MGUoa8APsMRCXyaiq15Pb8bjxK8mXquW +HLEFXuzhLCW1FORDoj0y9s/iuKC0iS0ROX8R/J7k5NrbgikbH8WP36UxKkYNr1IC +HOFImPTYRSKjVsL+fIUNb1DSp3S6SsYbL7v3XJJQqtlQiDq8U8x1aQFXJ9C4EoLR +zhKrKsECgYBtU/TSF/TATZY5XtrN9O+HX1Fbz70Ci8XgvioheVI2fezOcXPRzDcC +OYPaCMNKA5E8gHdg4s0TN7uDvKTJ+KhSg2V7gZ39A28dHrJaRX7Nz4k6t2uEBjX9 +a1JidpAIbJ+3w7+hj6L299tVZvS+Y/6Dz/uuEQGXfJg/l/5CCvQPsA== +-----END RSA PRIVATE KEY----- diff --git a/test/id_rsa.pub b/test/id_rsa.pub new file mode 100644 index 000000000..bd84623f1 --- /dev/null +++ b/test/id_rsa.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDkTcgXnHuqR0gbwegnr9Zxz4hTkjjV/SpgJNPJz7mo/HKNbx0rqjj1P0yGR053R9GSFFim2ut4NK9DPPUkQdyucw+DoLkYRHJmlJ4BNa9NTCD0sl+eSXO2969kZojCYSOgbmkCJx8mdgTwhzdgE/jhBrsY0hPE6pRTlU+H68/zeNdJUAIJf0LLXOm3hpTKLA19VICltl/j9VvBJpgRHdBylXEyL8HokYpjkQQk1ZXj3m7Nlo8yDdg4VcljOJWC+Xh8kxRMfK5x/VRVsYKCQXN5QlzKeqf7USRDUS/7mFoPUBW+d4kwKtGxRsWuIL2yeqzifZUTOgsh9+ZWAWxWffQZ your_email@example.com diff --git a/test/index.js b/test/index.js new file mode 100644 index 000000000..52cbff50e --- /dev/null +++ b/test/index.js @@ -0,0 +1,27 @@ +var fork = 
require("child_process").fork; +var path = require("path"); + +var bin = "./node_modules/.bin/istanbul"; +var cov = "cover --report=lcov --dir=test/coverage/js _mocha --".split(" "); + +if (process.platform === 'win32') { + bin = "./node_modules/mocha/bin/mocha"; + cov = []; +} + +var args = cov.concat([ + "test/runner", + "test/tests", + "--expose-gc", + "--timeout", + "15000" +]); + +if (!process.env.APPVEYOR && !process.env.TRAVIS) { + var local = path.join.bind(path, __dirname); + var dummyPath = local("home"); + process.env.HOME = dummyPath; + process.env.USERPROFILE = dummyPath; +} + +fork(bin, args, { cwd: path.join(__dirname, "../") }).on("close", process.exit); diff --git a/test/nodegit-test-rsa b/test/nodegit-test-rsa new file mode 100644 index 000000000..f5500849f --- /dev/null +++ b/test/nodegit-test-rsa @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQC5VMnDStyy3BDQ4Ilb/XnD5htRn0f3Is8NITe9o/KwZcPy2MFM +RssnQjyp/WoQXl/o3Id+UaTZ+TKzYYmP/bGnYlyZP+FBBTKrCcDDW0fVjLbDazEX +usAYCW58iGG9EomrwpSVrZlrLsZc1CPUdeeC8jmTuBQR7fyDFxM4sdoIyQIDAQAB +AoGBAA7ZZ00Ze3NtRH+n7fmL5qy2jCvPClIX3OUdazzrnO9bSAb2LQ6ygWaE3R11 +q4DiPucGfGi3m/DHEbPjtcTuu8Qdqr7Nb0FU2CS2og0zMO+Z+CcyzIkXN0o7spig +ekEY1Uml5MKGcJxu6afsOn+9LIi05SsYPCdtB5z716IewNgxAkEA2pKXov7jobMb +2vBA4BtEtcryflWFO21uwMDHlJ34mjrUAMCX89NPrY//v8g0eYGLm9ZAj/Ik632+ +uOKHCXaG7QJBANkRBBcnKojqybJoxbL9PLJ3VoJ+EfLllGsbMHzc5VMwQF8ViwBD +dOQ5feVKg601814Y1NGul/nprk896GNbUc0CQQCHD8iV1u1wcQ4IZyeflBoMQAFC +YbQ3ebLTdwyc4FTLcQiqAlijOXNl67J8nskWQB+1x1oT2OxJfGVLN+d7yHstAkBz +GKCwniXhn4z/OqrJc5mBj+GhI7PktXLzL+GP85jteUJIqKWhqCMM+KcWs2IKr/ax +SD1gSVFwREYW4l6cgElpAkBtngXppGinh3nywIIo/SFmUJV/cUlWBi6MMgfsP8b6 +37+bqJI+m56WBdAG2xNz0uk6DIMp6R7JafOpfgOIPPk9 +-----END RSA PRIVATE KEY----- diff --git a/test/nodegit-test-rsa.pub b/test/nodegit-test-rsa.pub new file mode 100644 index 000000000..8092c1642 --- /dev/null +++ b/test/nodegit-test-rsa.pub @@ -0,0 +1 @@ +ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAAAgQC5VMnDStyy3BDQ4Ilb/XnD5htRn0f3Is8NITe9o/KwZcPy2MFMRssnQjyp/WoQXl/o3Id+UaTZ+TKzYYmP/bGnYlyZP+FBBTKrCcDDW0fVjLbDazEXusAYCW58iGG9EomrwpSVrZlrLsZc1CPUdeeC8jmTuBQR7fyDFxM4sdoIyQ== diff --git a/test/nodegit.js b/test/nodegit.js deleted file mode 100644 index b603e7ae5..000000000 --- a/test/nodegit.js +++ /dev/null @@ -1,40 +0,0 @@ -var fs = require('fs'); -var rimraf = require('rimraf'); -var exec = require('child_process').exec; -var path = require('path'); -var async = require('async'); - -var testFiles = ['blob','difflist','oid','repo','tree_entry','commit','reference','revwalk','tree']; - -function setupReposCache(cb) { - fs.mkdir('repos',function() { - async.series([ - function empty(cb) { exec('git init repos/empty',function() { cb(); }); }, - function workdir(cb) { exec('git clone https://github.com/nodegit/nodegit.git repos/workdir',function() { cb(); }); }, - function nonrepo(cb) { - fs.mkdir('repos/nonrepo',function() { - fs.writeFile('repos/nonrepo/file.txt','This is a bogus file',function() { - cb(); - }); - }); - } - ],function() { - cb(); - }); - }); -} - -exports.setUp = function(cb) { - fs.exists('.reposCache', function(exists) { - if (!exists) { - setupReposCache(function(err) { - cb(); - }); - } - }); -}; - -Object.keys(testFiles).forEach(function(fileName) { - var testFile = testFiles[fileName] - exports[testFile] = require('./' + testFile); -}); diff --git a/test/oid.js b/test/oid.js deleted file mode 100644 index 3ce3d48d1..000000000 --- a/test/oid.js +++ /dev/null @@ -1,10 +0,0 @@ -var git = require('../'); - -var knownSha = 'fce88902e66c72b5b93e75bdb5ae717038b221f6'; - -exports.fromStringAndSha = function(test) { - test.expect(1); - var oid = git.Oid.fromString(knownSha); - test.equal(oid.sha(), knownSha, 'SHA should match known value'); - test.done(); -}; diff --git a/test/reference.js b/test/reference.js deleted file mode 100644 index e608a622f..000000000 --- a/test/reference.js +++ /dev/null @@ -1,14 +0,0 @@ -var 
git = require('../'), - rimraf = require('rimraf'); - -// Ref::Lookup -exports.lookup = function(test) { - test.expect(1); - - git.Repo.open('repos/workdir/.git', function(error, repo) { - repo.getReference('refs/heads/master', function(error, reference) { - test.ok(reference instanceof git.Reference); - test.done(); - }); - }); -}; diff --git a/test/repo.js b/test/repo.js deleted file mode 100644 index 70f3128dd..000000000 --- a/test/repo.js +++ /dev/null @@ -1,57 +0,0 @@ -var git = require('../'), - rimraf = require('rimraf'), - fs = require( 'fs' ); - -/** - * Repo - * Ensure the repo method can handle opening repositories with async/sync - * signatures properly. - */ -exports.openInvalidRepo = function(test){ - test.expect(1); - - // Test invalid repository - git.Repo.open('repos/nonrepo', function(error, repository) { - test.ok(error instanceof Error); - test.done(); - }); -}; - -exports.openValidRepo = function(test){ - test.expect(1); - - // Test valid repository - git.Repo.open('repos/workdir/.git', function(error, repository) { - test.equals(null, error, 'Valid repository error code'); - test.done(); - }); -}; - -/** - * Ensure repo doesn't attempt to open missing directories - */ -exports.nonexistentDirectory = function(test) { - test.expect(2); - git.Repo.open('/surely/this/directory/does/not/exist/on/this/machine', function(error, repository) { - test.notEqual(error, null, 'Attempting to open a nonexistent directory should error'); - test.equals(repository, null, 'Non existent directory should result in null repository'); - test.done(); - }); -}; - -/** - * Ensure the init method can create repositories at the destination path and - * can create either bare/non-bare. 
- */ -exports.init = function(test) { - test.expect(2); - // Create bare repo and test for creation - git.Repo.init('repos/newrepo', true, function(error, path, isBare) { - test.equals(null, error, 'Successfully created bare repository'); - // Verify repo exists - git.Repo.open('repos/newrepo', function(error, path, repo) { - test.equals(null, error, 'Valid repository created'); - test.done(); - }); - }); -}; diff --git a/test/revwalk.js b/test/revwalk.js deleted file mode 100644 index 469f18b51..000000000 --- a/test/revwalk.js +++ /dev/null @@ -1,24 +0,0 @@ -var git = require('../').raw, - path = require('path'), - rimraf = require('rimraf'); - -// Helper functions -var helper = { - // Test if obj is a true function - testFunction: function(test, obj, label) { - // The object reports itself as a function - test(typeof obj, 'function', label +' reports as a function.'); - // This ensures the repo is actually a derivative of the Function [[Class]] - test(toString.call(obj), '[object Function]', label +' [[Class]] is of type function.'); - }, - // Test code and handle exception thrown - testException: function(test, fun, label) { - try { - fun(); - test(false, label); - } - catch (ex) { - test(true, label); - } - } -}; diff --git a/test/runner.js b/test/runner.js new file mode 100644 index 000000000..a7a28cf57 --- /dev/null +++ b/test/runner.js @@ -0,0 +1,79 @@ +var promisify = require("promisify-node"); +var fse = promisify("fs-extra"); +var path = require("path"); +var local = path.join.bind(path, __dirname); +var exec = require('../utils/execPromise'); + +var NodeGit = require('..'); + +if(process.env.NODEGIT_TEST_THREADSAFETY) { + console.log('Enabling thread safety in NodeGit'); + NodeGit.enableThreadSafety(); +} else if (process.env.NODEGIT_TEST_THREADSAFETY_ASYNC) { + console.log('Enabling thread safety for async actions only in NodeGit'); + NodeGit.setThreadSafetyStatus(NodeGit.THREAD_SAFETY.ENABLED_FOR_ASYNC_ONLY); +} + +var workdirPath = 
local("repos/workdir"); + +before(function() { + this.timeout(350000); + + var url = "https://github.com/nodegit/test"; + return fse.remove(local("repos")) + .then(function() { + fse.remove(local("home")) + }) + .then(function() { + fse.mkdir(local("repos")); + }) + .then(function() { + return exec("git init " + local("repos", "empty")); + }) + .then(function() { + return exec("git clone " + url + " " + workdirPath); + }) + .then(function() { + return exec("git checkout rev-walk", {cwd: workdirPath}); + }) + .then(function() { + return exec("git checkout checkout-test", {cwd: workdirPath}); + }) + .then(function() { + return exec("git checkout master", {cwd: workdirPath}); + }) + .then(function() { + return fse.mkdir(local("repos", "nonrepo")); + }) + .then(function() { + return fse.writeFile(local("repos", "nonrepo", "file.txt"), + "This is a bogus file"); + }) + .then(function() { + return fse.mkdir(local("home")); + }) + .then(function() { + return fse.writeFile(local("home", ".gitconfig"), + "[user]\n name = John Doe\n email = johndoe@example.com"); + }); +}); + +beforeEach(function() { + this.timeout(4000); + return exec("git clean -xdf", {cwd: workdirPath}) + .then(function() { + return exec("git checkout master", {cwd: workdirPath}); + }) + .then(function() { + return exec("git reset --hard", {cwd: workdirPath}); + }); +}); + +afterEach(function(done) { + process.nextTick(function() { + if (global.gc) { + global.gc(); + } + done(); + }); +}); diff --git a/test/tests/annotated_commit.js b/test/tests/annotated_commit.js new file mode 100644 index 000000000..b6fe27864 --- /dev/null +++ b/test/tests/annotated_commit.js @@ -0,0 +1,70 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("AnnotatedCommit", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var AnnotatedCommit = NodeGit.AnnotatedCommit; + var Branch = NodeGit.Branch; + + var reposPath = 
local("../repos/workdir"); + var branchName = "master"; + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + }); + }); + + it("can create an AnnotatedCommit from a ref", function() { + var test = this; + + return Branch.lookup(test.repository, branchName, Branch.BRANCH.LOCAL) + .then(function(ref) { + return AnnotatedCommit.fromRef(test.repository, ref); + }) + .then(function(annotatedCommit) { + assert(annotatedCommit.id()); + }); + }); + + it("can free an AnnotatedCommit after creating it", function() { + var test = this; + + return Branch.lookup(test.repository, branchName, Branch.BRANCH.LOCAL) + .then(function(ref) { + return AnnotatedCommit.fromRef(test.repository, ref); + }) + .then(function(annotatedCommit) { + // Annotated commit should exist + assert(annotatedCommit.id()); + + // Free the annotated commit + annotatedCommit.free(); + + // Getting the id should now throw because the commit was freed + assert.throws(annotatedCommit.id); + }); + }); + + it("can lookup an AnnotatedCommit after creating it", function() { + var test = this; + var id; + + return Branch.lookup(test.repository, branchName, Branch.BRANCH.LOCAL) + .then(function(ref) { + return AnnotatedCommit.fromRef(test.repository, ref); + }) + .then(function(annotatedCommit) { + id = annotatedCommit.id(); + return AnnotatedCommit.lookup(test.repository, id); + }) + .then(function(annotatedCommit) { + assert(id, annotatedCommit.id()); + }); + }); +}); diff --git a/test/tests/attr.js b/test/tests/attr.js new file mode 100644 index 000000000..cf229d372 --- /dev/null +++ b/test/tests/attr.js @@ -0,0 +1,36 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Attr", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Attr = NodeGit.Attr; + var Status = NodeGit.Status; + + var reposPath = 
local("../repos/workdir"); + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + }); + }); + + it("can add a macro definition", function() { + var error = Attr.addMacro(this.repository, "binary", "-diff -crlf"); + + assert.equal(error, 0); + }); + + it("can flush the attr cache", function() { + Attr.cacheFlush(this.repository); + }); + + it("can lookup the value of a git attribute", function() { + var flags = Status.SHOW.INDEX_AND_WORKDIR; + return Attr.get(this.repository, flags, ".gitattributes", "test"); + }); +}); diff --git a/test/tests/blame.js b/test/tests/blame.js new file mode 100644 index 000000000..c16efdd03 --- /dev/null +++ b/test/tests/blame.js @@ -0,0 +1,36 @@ +var assert = require("assert"); +var RepoUtils = require("../utils/repository_setup"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Blame", function() { + var NodeGit = require("../../"); + + var Blame = NodeGit.Blame; + + var test; + var fileName = "foobar.js"; + var repoPath = local("../repos/blameRepo"); + + beforeEach(function() { + test = this; + + return RepoUtils.createRepository(repoPath) + .then(function(repository) { + test.repository = repository; + + return RepoUtils.commitFileToRepo( + repository, + fileName, + "line1\nline2\nline3" + ); + }); + }); + + it("can initialize blame without options", function() { + return Blame.file(test.repository, fileName) + .then(function(blame) { + assert(blame); + }); + }); +}); diff --git a/test/tests/blob.js b/test/tests/blob.js new file mode 100644 index 000000000..342cb2578 --- /dev/null +++ b/test/tests/blob.js @@ -0,0 +1,53 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Blob", function() { + var NodeGit = require("../../"); + + var Oid = NodeGit.Oid; + var Repository = NodeGit.Repository; + var FileMode = 
NodeGit.TreeEntry.FILEMODE; + + var reposPath = local("../repos/workdir"); + var oid = "111dd657329797f6165f52f5085f61ac976dcf04"; + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + + return repository.getBlob(oid); + }) + .then(function(blob) { + test.blob = blob; + }); + }); + + it("can provide content as a buffer", function() { + var contents = this.blob.content(); + + assert.ok(Buffer.isBuffer(contents)); + }); + + it("can provide content as a string", function() { + var contents = this.blob.toString(); + + assert.equal(typeof contents, "string"); + assert.equal(contents.slice(0, 7), "@import"); + }); + + it("can determine if a blob is not a binary", function() { + assert.equal(this.blob.filemode(), FileMode.BLOB); + }); + + it("can get a blob with an Oid object", function() { + var oidObject = Oid.fromString(oid); + return this.repository.getBlob(oidObject) + .then(function(blob) { + assert.equal(blob.id().toString(), oid); + }); + }); +}); diff --git a/test/tests/branch.js b/test/tests/branch.js new file mode 100644 index 000000000..8e69020a8 --- /dev/null +++ b/test/tests/branch.js @@ -0,0 +1,80 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Branch", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Branch = NodeGit.Branch; + var branchName = "test-branch"; + var fullBranchName = "refs/heads/" + branchName; + var upstreamName = "origin/master"; + + var reposPath = local("../repos/workdir"); + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + return repository.getMasterCommit(); + }) + .then(function(masterCommit) { + test.masterCommit = masterCommit; + + return test.repository.createBranch(branchName, masterCommit, true); + }) + 
.then(function(branch) { + test.branch = branch; + }); + }); + + it("can create a branch", function() { + var branch = this.branch; + var masterCommit = this.masterCommit; + + assert.equal(branch.name(), fullBranchName); + assert.equal(branch.target().toString(), masterCommit.sha()); + }); + + it("can delete a branch", function() { + var repo = this.repository; + + Branch.delete(this.branch); + + return repo.getBranch(branchName) + // Reverse the results, since if we found it it wasn't deleted + .then(Promise.reject.bind(Promise), Promise.resolve.bind(Promise)); + }); + + it("can see if the branch is pointed to by head", function() { + var repo = this.repository; + + return repo.getBranch("master") + .then(function(branch) { + assert.ok(branch.isHead()); + }); + }); + + it("can set an upstream for a branch", function() { + var branch = this.branch; + + return NodeGit.Branch.setUpstream(branch, upstreamName) + .then(function() { + return NodeGit.Branch.upstream(branch); + }) + .then(function(upstream) { + assert.equal(upstream.shorthand(), upstreamName); + }); + }); + + it("can get the name of a branch", function() { + var branch = this.branch; + + return NodeGit.Branch.name(branch) + .then(function(branchNameToTest) { + assert.equal(branchNameToTest, branchName); + }); + }); +}); diff --git a/test/tests/checkout.js b/test/tests/checkout.js new file mode 100644 index 000000000..e3815a35f --- /dev/null +++ b/test/tests/checkout.js @@ -0,0 +1,186 @@ +var assert = require("assert"); +var path = require("path"); +var fse = require("fs-extra"); +var local = path.join.bind(path, __dirname); + +describe("Checkout", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Checkout = NodeGit.Checkout; + + var readMeName = "README.md"; + var packageJsonName = "package.json"; + var reposPath = local("../repos/workdir"); + var readMePath = local("../repos/workdir/" + readMeName); + var packageJsonPath = local("../repos/workdir/" + 
packageJsonName); + var checkoutBranchName = "checkout-test"; + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repo) { + test.repository = repo; + }); + }); + + it("can checkout the head", function() { + var test = this; + + return Checkout.head(test.repository) + .then(function(blob) { + var packageContent = fse.readFileSync(packageJsonPath, "utf-8"); + + assert.ok(~packageContent.indexOf("\"ejs\": \"~1.0.0\",")); + }); + }); + + it("can force checkout a single file", function() { + var test = this; + + var packageContent = fse.readFileSync(packageJsonPath, "utf-8"); + var readmeContent = fse.readFileSync(readMePath, "utf-8"); + + assert.notEqual(packageContent, ""); + assert.notEqual(readmeContent, ""); + + fse.outputFileSync(readMePath, ""); + fse.outputFileSync(packageJsonPath, ""); + + var opts = { + checkoutStrategy: Checkout.STRATEGY.FORCE, + paths: packageJsonName + }; + + return Checkout.head(test.repository, opts) + .then(function() { + var resetPackageContent = fse.readFileSync(packageJsonPath, "utf-8"); + var resetReadmeContent = fse.readFileSync(readMePath, "utf-8"); + + assert.equal(resetPackageContent, packageContent); + assert.equal(resetReadmeContent, ""); + + var resetOpts = { + checkoutStrategy: Checkout.STRATEGY.FORCE + }; + + return Checkout.head(test.repository, resetOpts); + }).then(function() { + var resetContent = fse.readFileSync(readMePath, "utf-8"); + assert.equal(resetContent, readmeContent); + }); + }); + + it("can checkout by tree", function() { + var test = this; + + return test.repository.getTagByName("annotated-tag").then(function(tag) { + return Checkout.tree(test.repository, tag); + }).then(function() { + return test.repository.getHeadCommit(); + }).then(function(commit) { + assert.equal(commit, "32789a79e71fbc9e04d3eff7425e1771eb595150"); + }); + }); + + it("can checkout a branch", function() { + var test = this; + + return test.repository.checkoutBranch(checkoutBranchName) + 
.then(function() { + var packageContent = fse.readFileSync(packageJsonPath, "utf-8"); + + assert.ok(!~packageContent.indexOf("\"ejs\": \"~1.0.0\",")); + }) + .then(function() { + return test.repository.getStatus(); + }) + .then(function(statuses) { + assert.equal(statuses.length, 0); + }) + .then(function() { + return test.repository.checkoutBranch("master"); + }) + .then(function() { + var packageContent = fse.readFileSync(packageJsonPath, "utf-8"); + + assert.ok(~packageContent.indexOf("\"ejs\": \"~1.0.0\",")); + }); + }); + + it("can checkout an index with conflicts", function() { + var test = this; + + var testBranchName = "test"; + var ourCommit; + + return test.repository.getBranchCommit(checkoutBranchName) + .then(function(commit) { + ourCommit = commit; + + return test.repository.createBranch(testBranchName, commit.id()); + }) + .then(function() { + return test.repository.checkoutBranch(testBranchName); + }) + .then(function(branch) { + fse.writeFileSync(packageJsonPath, "\n"); + + return test.repository.refreshIndex() + .then(function(index) { + return index.addByPath(packageJsonName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "85135ab398976a4d5be6a8704297a45f2b1e7ab2"); + + var signature = test.repository.defaultSignature(); + + return test.repository.createCommit("refs/heads/" + testBranchName, + signature, signature, "we made breaking changes", oid, [ourCommit]); + }) + .then(function(commit) { + return Promise.all([ + test.repository.getBranchCommit(testBranchName), + test.repository.getBranchCommit("master") + ]); + }) + .then(function(commits) { + return NodeGit.Merge.commits(test.repository, commits[0], commits[1], + null); + }) + .then(function(index) { + assert.ok(index); + assert.ok(index.hasConflicts && index.hasConflicts()); + + return NodeGit.Checkout.index(test.repository, index); + }) + .then(function() { + // 
Verify that the conflict has been written to disk + var conflictedContent = fse.readFileSync(packageJsonPath, "utf-8"); + + assert.ok(~conflictedContent.indexOf("<<<<<<< ours")); + assert.ok(~conflictedContent.indexOf("=======")); + assert.ok(~conflictedContent.indexOf(">>>>>>> theirs")); + + // Cleanup + var opts = { + checkoutStrategy: Checkout.STRATEGY.FORCE, + paths: packageJsonName + }; + + return Checkout.head(test.repository, opts); + }) + .then(function() { + var finalContent = fse.readFileSync(packageJsonPath, "utf-8"); + assert.equal(finalContent, "\n"); + }); + }); +}); diff --git a/test/tests/cherrypick.js b/test/tests/cherrypick.js new file mode 100644 index 000000000..e4f2ca7e4 --- /dev/null +++ b/test/tests/cherrypick.js @@ -0,0 +1,160 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); + +describe("Cherrypick", function() { + var RepoUtils = require("../utils/repository_setup"); + var NodeGit = require("../../"); + var Cherrypick = NodeGit.Cherrypick; + + var repoPath = local("../repos/cherrypick"); + + beforeEach(function() { + var test = this; + return RepoUtils.createRepository(repoPath) + .then(function(repo) { + test.repository = repo; + }); + }); + + after(function() { + return fse.remove(repoPath); + }); + + it("can cherrypick a commit onto the index", function() { + var repo = this.repository; + var workDirPath = repo.workdir(); + var repoInfo; + + return RepoUtils.setupBranches(repo, true) + .then(function(info) { + repoInfo = info; + + assert(!fse.existsSync(path.join(workDirPath, repoInfo.theirFileName)), + repoInfo.theirFileName + " shouldn't exist"); + + var promise = Cherrypick.cherrypick(repo, repoInfo.theirCommit, {}); + assert(promise.then); + return promise; + }) + .then(function() { + assert(fse.existsSync(path.join(workDirPath, repoInfo.theirFileName)), + repoInfo.theirFileName + " 
should exist"); + + // Cherrypick.cherrypick leaves the repo in a cherrypick state + assert.equal(repo.state(), NodeGit.Repository.STATE.CHERRYPICK); + assert.ok(repo.isCherrypicking()); + + // cleanup + assert.equal(repo.stateCleanup(), 0); + assert.equal(repo.state(), NodeGit.Repository.STATE.NONE); + assert.ok(repo.isDefaultState()); + }); + }); + + it("can cherrypick a commit onto another specified commit", function() { + var repo = this.repository; + var workDirPath = repo.workdir(); + var repoInfo; + + return RepoUtils.setupBranches(repo) + .then(function(info) { + repoInfo = info; + + assert(!fse.existsSync(path.join(workDirPath, repoInfo.ourFileName)), + repoInfo.ourFileName + " shouldn't exist"); + assert(!fse.existsSync(path.join(workDirPath, repoInfo.theirFileName)), + repoInfo.theirFileName + " shouldn't exist"); + + var promise = Cherrypick.commit(repo, repoInfo.theirCommit, + repoInfo.ourCommit, 0, {}); + assert(promise.then); + return promise; + }) + .then(function(index) { + assert(index); + return index.writeTreeTo(repo); + }) + .then(function(oid) { + return repo.getTree(oid); + }) + .then(function(tree) { + var opts = { + checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE + }; + + return NodeGit.Checkout.tree(repo, tree, opts); + }) + .then(function() { + assert(fse.existsSync(path.join(workDirPath, repoInfo.ourFileName)), + repoInfo.ourFileName + " should exist"); + assert(fse.existsSync(path.join(workDirPath, repoInfo.theirFileName)), + repoInfo.theirFileName + " should exist"); + }); + }); + + it("can cherrypick a stash to apply it", function() { + var repo = this.repository; + var workDirPath = repo.workdir(); + var repoInfo; + var cherrypickOid; + + var addedContent = "\nIt makes things E-Z!"; + + return RepoUtils.setupBranches(repo, true) + .then(function(info) { + repoInfo = info; + + return repo.getStatus(); + }) + .then(function(statuses) { + assert.equal(statuses.length, 0); + + return fse.writeFile(path.join(workDirPath, 
repoInfo.ourFileName), + repoInfo.ourFileContent + addedContent); + }) + .then(function() { + return repo.getStatus(); + }) + .then(function(statuses) { + assert.equal(statuses.length, 1); + + return NodeGit.Stash.save(repo, repoInfo.ourSignature, "our stash", 0); + }) + .then(function(oid) { + cherrypickOid = oid; + + return fse.readFile(path.join(workDirPath, repoInfo.ourFileName)); + }) + .then(function(fileContent) { + assert.equal(fileContent, repoInfo.ourFileContent); + + return repo.getStatus(); + }) + .then(function(statuses) { + assert.equal(statuses.length, 0); + + return repo.getCommit(cherrypickOid); + }) + .then(function(commit) { + var opts = { + mainline: 1 + }; + + return Cherrypick.cherrypick(repo, commit, opts); + }) + .then(function() { + return repo.getStatus(); + }) + .then(function(statuses) { + assert.equal(statuses.length, 1); + + return fse.readFile(path.join(workDirPath, repoInfo.ourFileName)); + }) + .then(function(fileContent) { + assert.equal(fileContent, repoInfo.ourFileContent + addedContent); + }); + }); +}); diff --git a/test/tests/clone.js b/test/tests/clone.js new file mode 100644 index 000000000..c7645357d --- /dev/null +++ b/test/tests/clone.js @@ -0,0 +1,317 @@ +var path = require("path"); +var assert = require("assert"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var local = path.join.bind(path, __dirname); +var _ = require("lodash"); + +describe("Clone", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Clone = NodeGit.Clone; + + var clonePath = local("../repos/clone"); + + var sshPublicKeyPath = local("../id_rsa.pub"); + var sshPrivateKeyPath = local("../id_rsa"); + var sshEncryptedPublicKeyPath = local("../encrypted_rsa.pub"); + var sshEncryptedPrivateKeyPath = local("../encrypted_rsa"); + + // Set a reasonable timeout here now that our repository has grown. 
+ this.timeout(30000); + + beforeEach(function() { + return fse.remove(clonePath).catch(function(err) { + console.log(err); + + throw err; + }); + }); + + it.skip("can clone with http", function() { + var test = this; + var url = "http://git.tbranyen.com/smart/site-content"; + + return Clone(url, clonePath).then(function(repo) { + assert.ok(repo instanceof Repository); + test.repository = repo; + }); + }); + + it("can clone with https", function() { + var test = this; + var url = "https://github.com/nodegit/test.git"; + var opts = { + fetchOpts: { + callbacks: { + certificateCheck: function() { + return 1; + } + } + } + }; + + return Clone(url, clonePath, opts).then(function(repo) { + assert.ok(repo instanceof Repository); + test.repository = repo; + }); + }); + + it("can clone twice with https using same config object", function() { + var test = this; + var url = "https://github.com/nodegit/test.git"; + var progressCount = 0; + var opts = { + fetchOpts: { + callbacks: { + transferProgress: function(progress) { + progressCount++; + } + } + } + }; + + return Clone(url, clonePath, opts) + .then(function(repo) { + assert.ok(repo instanceof Repository); + assert.notEqual(progressCount, 0); + return fse.remove(clonePath); + }) + .then(function() { + progressCount = 0; + return Clone(url, clonePath, opts); + }) + .then(function(repo) { + assert.ok(repo instanceof Repository); + assert.notEqual(progressCount, 0); + test.repository = repo; + }); + }); + + function updateProgressIntervals(progressIntervals, lastInvocation) { + var now = new Date(); + if (lastInvocation) { + progressIntervals.push(now - lastInvocation); + } + return now; + } + + it("can clone with https and default throttled progress", function() { + var test = this; + var url = "https://github.com/nodegit/test.git"; + var progressCount = 0; + var lastInvocation; + var progressIntervals = []; + var opts = { + fetchOpts: { + callbacks: { + transferProgress: function(progress) { + lastInvocation = 
updateProgressIntervals(progressIntervals, + lastInvocation); + progressCount++; + } + } + } + }; + + return Clone(url, clonePath, opts).then(function(repo) { + assert.ok(repo instanceof Repository); + assert.notEqual(progressCount, 0); + var averageProgressInterval = _.sum(progressIntervals) / + progressIntervals.length; + // even though we are specifying a throttle period of 100, + // the throttle is applied on the scheduling side, + // and actual execution is at the mercy of the main js thread + // so the actual throttle intervals could be less than the specified + // throttle period + if (!averageProgressInterval || averageProgressInterval < 75) { + assert.fail(averageProgressInterval, 75, + "unexpected average time between callbacks", "<"); + } + test.repository = repo; + }); + }); + + it("can clone with https and explicitly throttled progress", function() { + var test = this; + var url = "https://github.com/nodegit/test.git"; + var progressCount = 0; + var lastInvocation; + var progressIntervals = []; + var opts = { + fetchOpts: { + callbacks: { + transferProgress: { + throttle: 50, + callback: function(progress) { + lastInvocation = updateProgressIntervals(progressIntervals, + lastInvocation); + progressCount++; + } + } + } + } + }; + + return Clone(url, clonePath, opts).then(function(repo) { + assert.ok(repo instanceof Repository); + assert.notEqual(progressCount, 0); + var averageProgressInterval = _.sum(progressIntervals) / + progressIntervals.length; + if (!averageProgressInterval || averageProgressInterval < 35) { + assert.fail(averageProgressInterval, 35, + "unexpected average time between callbacks", "<"); + } + test.repository = repo; + }); + }); + + it("can clone using nested function", function() { + var test = this; + var url = "https://github.com/nodegit/test.git"; + var opts = { + fetchOpts: { + callbacks: { + certificateCheck: function() { + return 1; + } + } + } + }; + + return Clone.clone(url, clonePath, opts).then(function(repo) { + 
assert.ok(repo instanceof Repository); + test.repository = repo; + }); + }); + + it("can clone with ssh", function() { + var test = this; + var url = "git@github.com:nodegit/test.git"; + var opts = { + fetchOpts: { + callbacks: { + certificateCheck: function() { + return 1; + }, + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyFromAgent(userName); + } + } + } + }; + + return Clone(url, clonePath, opts).then(function(repo) { + assert.ok(repo instanceof Repository); + test.repository = repo; + }); + }); + + it("can clone with ssh while manually loading a key", function() { + var test = this; + var url = "git@github.com:nodegit/test.git"; + var opts = { + fetchOpts: { + callbacks: { + certificateCheck: function() { + return 1; + }, + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyNew( + userName, + sshPublicKeyPath, + sshPrivateKeyPath, + ""); + } + } + } + }; + + return Clone(url, clonePath, opts).then(function(repo) { + assert.ok(repo instanceof Repository); + test.repository = repo; + }); + }); + + it("can clone with ssh while manually loading an encrypted key", function() { + var test = this; + var url = "git@github.com:nodegit/test.git"; + var opts = { + fetchOpts: { + callbacks: { + certificateCheck: function() { + return 1; + }, + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyNew( + userName, + sshEncryptedPublicKeyPath, + sshEncryptedPrivateKeyPath, + "test-password" + ); + } + } + } + }; + + return Clone(url, clonePath, opts).then(function(repo) { + assert.ok(repo instanceof Repository); + test.repository = repo; + }); + }); + + it("can clone with git", function() { + var test = this; + var url = "git://github.com/nodegit/test.git"; + var opts = { + fetchOpts: { + callbacks: { + certificateCheck: function() { + return 1; + } + } + } + }; + + return Clone(url, clonePath, opts).then(function(repo) { + test.repository = repo; + assert.ok(repo instanceof Repository); + }); + }); + + it("can clone with 
filesystem", function() { + var test = this; + var prefix = process.platform === "win32" ? "" : "file://"; + var url = prefix + local("../repos/empty"); + + return Clone(url, clonePath).then(function(repo) { + assert.ok(repo instanceof Repository); + test.repository = repo; + }); + }); + + it("will not segfault when accessing a url without username", function() { + var url = "https://github.com/nodegit/private"; + + var firstPass = true; + + return Clone(url, clonePath, { + fetchOpts: { + callbacks: { + certificateCheck: function() { + return 1; + }, + credentials: function() { + if (firstPass) { + firstPass = false; + return NodeGit.Cred.userpassPlaintextNew("fake-token", + "x-oauth-basic"); + } else { + return NodeGit.Cred.defaultNew(); + } + } + } + } + }).catch(function(reason) { }); + }); +}); diff --git a/test/tests/commit.js b/test/tests/commit.js new file mode 100644 index 000000000..be066093e --- /dev/null +++ b/test/tests/commit.js @@ -0,0 +1,654 @@ +var assert = require("assert"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); + +var garbageCollect = require("../utils/garbage_collect.js"); +var leakTest = require("../utils/leak_test"); + +var local = path.join.bind(path, __dirname); + +var exec = require("../../utils/execPromise"); + +describe("Commit", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Diff = NodeGit.Diff; + + var reposPath = local("../repos/workdir"); + var oid = "fce88902e66c72b5b93e75bdb5ae717038b221f6"; + + function reinitialize(test) { + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + + return repository.getCommit(oid); + }) + .then(function(commit) { + test.commit = commit; + }); + } + + function commitFile(repo, fileName, fileContent, commitMessage) { + var index; + var treeOid; + var parent; + + return fse.writeFile(path.join(repo.workdir(), fileName), fileContent) + 
.then(function() { + return repo.refreshIndex(); + }) + .then(function(indexResult) { + index = indexResult; + }) + .then(function() { + return index.addByPath(fileName); + }) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }) + .then(function(oidResult) { + treeOid = oidResult; + return NodeGit.Reference.nameToId(repo, "HEAD"); + }) + .then(function(head) { + return repo.getCommit(head); + }) + .then(function(parentResult) { + parent = parentResult; + return Promise.all([ + NodeGit.Signature.create("Foo Bar", "foo@bar.com", 123456789, 60), + NodeGit.Signature.create("Foo A Bar", "foo@bar.com", 987654321, 90) + ]); + }) + .then(function(signatures) { + var author = signatures[0]; + var committer = signatures[1]; + + return repo.createCommit( + "HEAD", + author, + committer, + "message", + treeOid, + [parent]); + }); + } + + function undoCommit() { + return exec("git reset --hard HEAD~1", {cwd: reposPath}); + } + + beforeEach(function() { + return reinitialize(this); + }); + + it("will fail with an invalid sha", function() { + return this.repository.getCommit("invalid").then(null, function(err) { + assert.ok(err instanceof Error); + }); + }); + + it("has a message", function() { + assert.equal(this.commit.message(), "Update README.md"); + }); + + it("has a raw message", function() { + assert.equal(this.commit.messageRaw(), "Update README.md"); + }); + + it("has a message encoding", function() { + var encoding = this.commit.messageEncoding(); + assert.ok(encoding === "UTF-8" || encoding === undefined); + }); + + it("has a summary", function() { + assert.equal(this.commit.summary(), "Update README.md"); + }); + + it("has a sha", function() { + assert.equal(this.commit.sha(), oid); + }); + + it("has a time", function() { + assert.equal(this.commit.timeMs(), 1362012884000); + }); + + it("has a date", function() { + assert.equal(this.commit.date().getTime(), 1362012884000); + }); + + it("has a time offset", function() 
{ + assert.equal(this.commit.timeOffset(), 780); + }); + + it("can create a commit", function() { + var test = this; + var expectedCommitId = "315e77328ef596f3bc065d8ac6dd2c72c09de8a5"; + var fileName = "newfile.txt"; + var fileContent = "hello world"; + + var repo; + var index; + var treeOid; + var parent; + + return NodeGit.Repository.open(reposPath) + .then(function(repoResult) { + repo = repoResult; + return fse.writeFile(path.join(repo.workdir(), fileName), fileContent); + }) + .then(function() { + return repo.refreshIndex(); + }) + .then(function(indexResult) { + index = indexResult; + }) + .then(function() { + return index.addByPath(fileName); + }) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }) + .then(function(oidResult) { + treeOid = oidResult; + return NodeGit.Reference.nameToId(repo, "HEAD"); + }) + .then(function(head) { + return repo.getCommit(head); + }) + .then(function(parentResult) { + parent = parentResult; + return Promise.all([ + NodeGit.Signature.create("Foo Bar", "foo@bar.com", 123456789, 60), + NodeGit.Signature.create("Foo A Bar", "foo@bar.com", 987654321, 90) + ]); + }) + .then(function(signatures) { + var author = signatures[0]; + var committer = signatures[1]; + + return repo.createCommit( + "HEAD", + author, + committer, + "message", + treeOid, + [parent]); + }) + .then(function(commitId) { + assert.equal(expectedCommitId, commitId); + return undoCommit() + .then(function(){ + return reinitialize(test); + }); + }, function(reason) { + return reinitialize(test) + .then(function() { + return Promise.reject(reason); + }); + }); + }); + + + it("can amend commit", function(){ + var commitToAmendId = "315e77328ef596f3bc065d8ac6dd2c72c09de8a5"; + var expectedAmendedCommitId = "57836e96555243666ea74ea888310cc7c41d4613"; + var fileName = "newfile.txt"; + var fileContent = "hello world"; + var newFileName = "newerfile.txt"; + var newFileContent = "goodbye world"; + var messageEncoding = 
"US-ASCII"; + var message = "First commit"; + + var repo; + var index; + var treeOid; + var parent; + var author; + var committer; + var amendedCommitId; + + return NodeGit.Repository.open(reposPath) + .then(function(repoResult) { + repo = repoResult; + return fse.writeFile(path.join(repo.workdir(), fileName), fileContent); + }) + .then(function() { + return repo.refreshIndex(); + }) + .then(function(indexResult) { + index = indexResult; + }) + .then(function() { + return index.addByPath(fileName); + }) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }) + .then(function(oidResult) { + treeOid = oidResult; + return NodeGit.Reference.nameToId(repo, "HEAD"); + }) + .then(function(head) { + return repo.getCommit(head); + }) + .then(function(parentResult) { + parent = parentResult; + return Promise.all([ + NodeGit.Signature.create("Foo Bar", "foo@bar.com", 123456789, 60), + NodeGit.Signature.create("Foo A Bar", "foo@bar.com", 987654321, 90) + ]); + }) + .then(function(signatures) { + var author = signatures[0]; + var committer = signatures[1]; + + return repo.createCommit( + "HEAD", + author, + committer, + "message", + treeOid, + [parent]); + }) + .then(function() { + return fse.writeFile( + path.join(repo.workdir(), newFileName), + newFileContent + ); + }) + .then(function() { + return repo.refreshIndex(); + }) + .then(function(indexResult) { + index = indexResult; + }) + .then(function() { + return index.addByPath(newFileName); + }) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }) + .then(function(resultOid){ + treeOid = resultOid; + return Promise.all([ + repo.getCommit(commitToAmendId), + NodeGit.Signature.create( + "New Foo Bar", + "newfoo@bar.com", + 246802468, + 12 + ), + NodeGit.Signature.create( + "New Foo A Bar", + "newfoo@bar.com", + 4807891730, + 32 + ) + ]); + + }) + .then(function(amendInfo){ + var commit = amendInfo[0]; + author = amendInfo[1]; + 
committer = amendInfo[2]; + return commit.amend( + "HEAD", + author, + committer, + messageEncoding, + message, + treeOid + ); + }) + .then(function(commitId){ + amendedCommitId = commitId; + return undoCommit(); + }) + .then(function(){ + assert.equal(amendedCommitId, expectedAmendedCommitId); + }); + }); + + it("can amend commit and update reference separately", function() { + var customReflogMessage = "updating reference manually"; + + var head, repo, oid, originalReflogCount; + + return NodeGit.Repository.open(reposPath) + .then(function(repoResult) { + repo = repoResult; + // grab the original reflog entry count (to make sure .amend + // doesn't add a reflog entry when not given a reference) + return NodeGit.Reflog.read(repo, "HEAD"); + }) + .then(function(reflog) { + originalReflogCount = reflog.entrycount(); + // get the head reference and commit + return repo.head(); + }) + .then(function(headResult) { + head = headResult; + return repo.getHeadCommit(); + }) + .then(function(headCommit) { + // amend the commit but don't update any reference + // (passing null as update_ref) + return headCommit.amend( + null, + null, + null, + "message", + null, + null); + }).then(function(oidResult) { + oid = oidResult; + // update the reference manually + return head.setTarget(oid, customReflogMessage); + }).then(function() { + // load reflog and make sure the last message is what we expected + return NodeGit.Reflog.read(repo, "HEAD"); + }).then(function(reflog) { + var reflogEntry = reflog.entryByIndex(0); + assert.equal( + reflogEntry.message(), + customReflogMessage + ); + assert.equal( + reflogEntry.idNew().toString(), + oid + ); + // only setTarget should have added to the entrycount + assert.equal(reflog.entrycount(), originalReflogCount + 1); + }); + }); + + it("has an owner", function() { + var owner = this.commit.owner(); + assert.ok(owner instanceof Repository); + }); + + it("can walk its repository's history", function(done) { + var historyCount = 0; + var 
expectedHistoryCount = 364; + + var history = this.commit.history(); + + history.on("commit", function(commit) { + historyCount++; + }); + + history.on("end", function(commits) { + assert.equal(historyCount, expectedHistoryCount); + assert.equal(commits.length, expectedHistoryCount); + + done(); + }); + + history.on("error", function(err) { + assert.ok(false); + }); + + history.start(); + }); + + it("can fetch the master branch HEAD", function() { + var repository = this.repository; + + return repository.getBranchCommit("master").then(function(commit) { + return repository.getCommit(commit.sha()); + }); + }); + + it("can fetch all of its parents", function() { + return this.commit.getParents().then(function(parents) { + assert.equal(parents.length, 1); + + var sha = parents[0].sha(); + assert.equal(sha, "ecfd36c80a3e9081f200dfda2391acadb56dac27"); + }); + }); + + it("can specify a parents limit", function() { + return this.commit.getParents(0).then(function(parents) { + assert.equal(parents.length, 0); + }); + }); + + it("can specify limit higher than actual parents", function() { + return this.commit.getParents(2).then(function(parents) { + assert.equal(parents.length, 1); + }); + }); + + it("can fetch parents of a merge commit", function () { + return NodeGit.Repository.open(reposPath) + .then(function (repo) { + return repo.getCommit("bf1da765e357a9b936d6d511f2c7b78e0de53632"); + }) + .then(function (commit) { + return commit.getParents(); + }) + .then(function (parents) { + assert.equal(parents.length, 2); + }); + }); + + it("has a parent count", function() { + assert.equal(1, this.commit.parentcount()); + }); + + it("can retrieve and walk a commit tree", function() { + var commitTreeEntryCount = 0; + var expectedCommitTreeEntryCount = 198; + + return this.commit.getTree().then(function(tree) { + return new Promise(function(resolve, fail) { + + var treeWalker = tree.walk(); + + treeWalker.on("entry", function(entry) { + commitTreeEntryCount++; + }); + + 
treeWalker.on("error", function(error) { + fail(error); + }); + + treeWalker.on("end", function(entries) { + try { + assert.equal(commitTreeEntryCount, expectedCommitTreeEntryCount); + resolve(); + } + catch (e) { + fail(e); + } + }); + + treeWalker.start(); + }); + }); + }); + + it("can get the commit diff", function() { + return this.commit.getDiff().then(function(diff) { + assert.equal(diff.length, 1); + }); + }); + + // it("can get the commit diff in large context", function() { + // For displaying the full file we can set context_lines of options. + // Eventually this should work, but right now there is a + // comment in diff.c in libgit2 of "/* TODO: parse thresholds */" + // It will add the "--unified" but not with the "=x" part. + // options.context_lines = 20000; + // }); + + it("can get the commit diff without whitespace", function() { + var repo; + var options = {}; + var GIT_DIFF_IGNORE_WHITESPACE = (1 << 22); + options.flags = GIT_DIFF_IGNORE_WHITESPACE; + + var fileName = "whitespacetest.txt"; + var fileContent = "line a\nline b\nline c\nline d\n line e\nline f\n" + + "line g\nline h\nline i\n line j\nline k\nline l\n" + + "line m\nline n\n line o\nline p\nline q\n" + + "line r\nline s\nline t\nline u\nline v\nline w\n" + + "line x\nline y\nline z\n"; + var changedFileContent = "line a\nline b\n line c\nline d\n" + + "line e\nline f\nline g\n line h\nline i\nline j\n" + + "line k\nline l\nline m\nline n\nline o\nlinep\n" + + " line q\nline r\nline s\nline t\n\nline u\n" + + "line v1\nline w\nline x\n \nline y\nline z\n"; + + return NodeGit.Repository.open(reposPath) + .then(function(repoResult) { + repo = repoResult; + return commitFile(repo, fileName, fileContent, "commit this"); + }) + .then(function(){ + return commitFile(repo, fileName, changedFileContent, "commit that"); + }) + .then (function() { + return repo.getHeadCommit(); + }) + .then (function(wsCommit) { + return wsCommit.getDiffWithOptions(options); + }) + .then(function(diff) { + 
assert.equal(diff.length, 1); + return diff[0].patches(); + }) + .then(function(patches) { + assert.equal(patches.length, 1); + var patch = patches[0]; + + assert.equal(patch.oldFile().path(), fileName); + assert.equal(patch.newFile().path(), fileName); + assert.ok(patch.isModified()); + + return patch.hunks(); + }) + .then(function(hunks) { + return hunks[0].lines(); + }) + .then(function(lines) { + //check all hunk lines + assert.equal(lines.length, 12); + assert.equal(lines[0].origin(), Diff.LINE.CONTEXT); + + assert.equal(lines[1].content().length, 9); + assert.equal(lines[1].content(), "line s\n"); + assert.equal(lines[1].origin(), Diff.LINE.CONTEXT); + + assert.equal(lines[2].origin(), Diff.LINE.CONTEXT); + + assert.equal(lines[3].content().length, 1); + assert.equal(lines[3].content(), "\n"); + assert.equal(lines[3].origin(), Diff.LINE.ADDITION); + + assert.equal(lines[4].origin(), Diff.LINE.CONTEXT); + + assert.equal(lines[5].content().length, 7); + assert.equal(lines[5].content(), "line v\n"); + assert.equal(lines[5].origin(), Diff.LINE.DELETION); + + assert.equal(lines[6].content().length, 8); + assert.equal(lines[6].content(), "line v1\n"); + assert.equal(lines[6].origin(), Diff.LINE.ADDITION); + + assert.equal(lines[7].origin(), Diff.LINE.CONTEXT); + + assert.equal(lines[8].origin(), Diff.LINE.CONTEXT); + + assert.equal(lines[9].content().length, 4); + assert.equal(lines[9].content(), "\t\t\t\n"); + assert.equal(lines[9].origin(), Diff.LINE.ADDITION); + + assert.equal(lines[10].origin(), Diff.LINE.CONTEXT); + + assert.equal(lines[11].origin(), Diff.LINE.CONTEXT); + }); + }); + + describe("Commit's Author", function() { + before(function() { + this.author = this.commit.author(); + }); + + it("is available", function() { + assert.ok(this.author instanceof NodeGit.Signature); + }); + + it("has a name", function() { + assert.equal(this.author.name(), "Michael Robinson"); + }); + + it("has an email", function() { + assert.equal(this.author.email(), 
"mike@panmedia.co.nz"); + }); + }); + + describe("Commit's Committer", function() { + before(function() { + this.committer = this.commit.committer(); + }); + + it("is available", function() { + assert.ok(this.committer instanceof NodeGit.Signature); + }); + + it("has a name", function() { + assert.equal(this.committer.name(), "Michael Robinson"); + }); + + it("has an email", function() { + assert.equal(this.committer.email(), "mike@panmedia.co.nz"); + }); + }); + + it("does not leak", function() { + var test = this; + + return leakTest(NodeGit.Commit, function() { + return NodeGit.Commit.lookup(test.repository, oid); + }); + }); + + it("duplicates signature", function() { + garbageCollect(); + var Signature = NodeGit.Signature; + var startSelfFreeingCount = Signature.getSelfFreeingInstanceCount(); + var startNonSelfFreeingCount = + Signature.getNonSelfFreeingConstructedCount(); + var signature = this.commit.author(); + + garbageCollect(); + var endSelfFreeingCount = Signature.getSelfFreeingInstanceCount(); + var endNonSelfFreeingCount = Signature.getNonSelfFreeingConstructedCount(); + // we should get one duplicated, self-freeing signature + assert.equal(startSelfFreeingCount + 1, endSelfFreeingCount); + assert.equal(startNonSelfFreeingCount, endNonSelfFreeingCount); + + signature = null; + garbageCollect(); + endSelfFreeingCount = Signature.getSelfFreeingInstanceCount(); + // the self-freeing signature should get freed + assert.equal(startSelfFreeingCount, endSelfFreeingCount); + }); +}); diff --git a/test/tests/config.js b/test/tests/config.js new file mode 100644 index 000000000..3ac6fca48 --- /dev/null +++ b/test/tests/config.js @@ -0,0 +1,95 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +var exec = require("../../utils/execPromise"); + +describe("Config", function() { + var NodeGit = require("../../"); + + var reposPath = local("../repos/workdir"); + + it("can get and set a global value", 
function() { + var savedUserName; + + function finallyFn() { + return exec("git config --global user.name \"" + savedUserName + "\""); + } + + return exec("git config --global user.name") + .then(function(userName) { + savedUserName = userName.trim(); + + return exec( + "git config --global user.name \"" + savedUserName + "-test\""); + }) + .then(function() { + return NodeGit.Config.openDefault(); + }) + .then(function(config) { + return config.getString("user.name"); + }) + .then(function(userNameFromNodeGit) { + assert.equal(savedUserName + "-test", userNameFromNodeGit); + }) + .then(finallyFn) + .catch(function(e) { + return finallyFn() + .then(function() { + throw e; + }); + }); + }); + + it("will reject when getting value of non-existent config key", function() { + // Test initially for finding source of a segfault. There was a problem + // where getting an empty config value crashes nodegit. + return NodeGit.Config.openDefault() + .then(function(config) { + return config.getString("user.fakevalue"); + }) + .catch(function (e) { + return true; + }); + }); + + it("can get and set a repo config value", function() { + var savedUserName; + + function finallyFn() { + return exec("git config user.name \"" + savedUserName + "\"", { + cwd: reposPath + }); + } + + return exec("git config user.name", { + cwd: reposPath + }) + .then(function(userName) { + savedUserName = userName.trim(); + + return exec("git config user.name \"" + savedUserName + "-test\"", { + cwd: reposPath + }); + }) + .then(function() { + return NodeGit.Repository.open(reposPath); + }) + .then(function(repo) { + return repo.config(); + }) + .then(function(config) { + return config.getString("user.name"); + }) + .then(function(userNameFromNodeGit) { + assert.equal(savedUserName + "-test", userNameFromNodeGit); + }) + .then(finallyFn) + .catch(function(e) { + return finallyFn() + .then(function() { + throw e; + }); + }); + }); +}); diff --git a/test/tests/convenient_line.js 
b/test/tests/convenient_line.js new file mode 100644 index 000000000..d4d51e59c --- /dev/null +++ b/test/tests/convenient_line.js @@ -0,0 +1,69 @@ +var assert = require("assert"); +var repoSetup = require("../utils/repository_setup"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("ConvenientLine", function() { + var repoPath = local("../repos/convenientLineTest"); + var unicodeLine = "Ťḥð–Žá¹§ ℓỈð“ƒá»‡ çǒðš—ẗảḭṋṦ Ûð§Çð—°á¹“á¸ð”¢\n"; + var asciiLine = "but this line doesn't\n"; + + beforeEach(function() { + var test = this; + + return repoSetup.createRepository(repoPath) + .then(function(repo) { + return repoSetup.commitFileToRepo( + repo, + "fileWithUnicodeChars", + unicodeLine + asciiLine + ); + }) + .then(function(commit) { + return commit.getDiff(); + }) + .then(function(diff) { + return diff[0].patches(); + }) + .then(function(patches) { + return patches[0].hunks(); + }) + .then(function(hunks) { + return hunks[0].lines(); + }) + .then(function(lines) { + test.unicodeLine = lines[0]; + test.asciiLine = lines[1]; + }); + }); + + after(function() { + return fse.remove(repoPath); + }); + + it("can parse the byte length of a unicode string", function() { + var line = this.unicodeLine; + + assert.equal(line.contentLen(), Buffer.byteLength(unicodeLine, "utf8")); + }); + + it("can get a line that contains unicode", function() { + var line = this.unicodeLine; + + assert.equal(line.content(), unicodeLine); + }); + + it("can parse the byte length of a ascii string", function() { + var line = this.asciiLine; + + assert.equal(line.contentLen(), Buffer.byteLength(asciiLine, "utf8")); + }); + + it("can get a line that contains ascii", function() { + var line = this.asciiLine; + + assert.equal(line.content(), asciiLine); + }); +}); diff --git a/test/tests/cred.js b/test/tests/cred.js new file mode 100644 index 000000000..6f0bb46ad --- /dev/null +++ 
b/test/tests/cred.js @@ -0,0 +1,73 @@ +var assert = require("assert"); +var path = require("path"); +var fs = require("fs"); +var local = path.join.bind(path, __dirname); + +describe("Cred", function() { + var NodeGit = require("../../"); + + var sshPublicKey = local("../id_rsa.pub"); + var sshPrivateKey = local("../id_rsa"); + + it("can create default credentials", function() { + var defaultCreds = NodeGit.Cred.defaultNew(); + assert.ok(defaultCreds instanceof NodeGit.Cred); + }); + + it("can create ssh credentials using passed keys", function() { + var cred = NodeGit.Cred.sshKeyNew( + "username", + sshPublicKey, + sshPrivateKey, + ""); + + assert.ok(cred instanceof NodeGit.Cred); + }); + + it("can create ssh credentials using passed keys in memory", function() { + var publicKeyContents = fs.readFileSync(sshPublicKey, { + encoding: "ascii" + }); + var privateKeyContents = fs.readFileSync(sshPrivateKey, { + encoding: "ascii" + }); + + return NodeGit.Cred.sshKeyMemoryNew( + "username", + publicKeyContents, + privateKeyContents, + "").then(function(cred) { + assert.ok(cred instanceof NodeGit.Cred); + }); + }); + + it("can create credentials using plaintext", function() { + var plaintextCreds = NodeGit.Cred.userpassPlaintextNew + ("username", "password"); + assert.ok(plaintextCreds instanceof NodeGit.Cred); + }); + + it("can create credentials using agent", function() { + var fromAgentCreds = NodeGit.Cred.sshKeyFromAgent + ("username"); + assert.ok(fromAgentCreds instanceof NodeGit.Cred); + }); + + it("can create credentials using username", function() { + return NodeGit.Cred.usernameNew + ("username").then(function(cred) { + assert.ok(cred instanceof NodeGit.Cred); + }); + }); + + it("can return 1 if a username exists", function() { + var plaintextCreds = NodeGit.Cred.userpassPlaintextNew + ("username", "password"); + assert.ok(plaintextCreds.hasUsername() === 1); + }); + + it("can return 0 if a username does not exist", function() { + var defaultCreds = 
NodeGit.Cred.defaultNew(); + assert.ok(defaultCreds.hasUsername() === 0); + }); +}); diff --git a/test/tests/diff.js b/test/tests/diff.js new file mode 100644 index 000000000..43d14a3ec --- /dev/null +++ b/test/tests/diff.js @@ -0,0 +1,508 @@ +var assert = require("assert"); +var path = require("path"); +var promisify = require("promisify-node"); +var _ = require("lodash"); +var fse = promisify(require("fs-extra")); +var local = path.join.bind(path, __dirname); + +function getLinesFromDiff(diff) { + return diff.patches() + .then(function(patches) { + return Promise.all(_.map(patches, function(patch) { + return patch.hunks(); + })); + }) + .then(function(listsOfHunks) { + var hunks = _.flatten(listsOfHunks); + return Promise.all(_.map(hunks, function(hunk) { + return hunk.lines(); + })); + }) + .then(function(listsOfLines) { + var lines = _.flatten(listsOfLines); + return _.map(lines, function(line) { + return line.content(); + }); + }); +} + +describe("Diff", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Diff = NodeGit.Diff; + var Blob = NodeGit.Blob; + + var reposPath = local("../repos/workdir"); + var oid = "fce88902e66c72b5b93e75bdb5ae717038b221f6"; + var diffFilename = "wddiff.txt"; + var diffFilepath = local("../repos/workdir", diffFilename); + + var moveFromFile = "README.md"; + var moveToFile = "MOVED_README.md"; + + var moveFromPath = local("../repos/workdir", moveFromFile); + var moveToPath = local("../repos/workdir", moveToFile); + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath).then(function(repository) { + test.repository = repository; + + return repository.refreshIndex(); + }) + .then(function(index) { + test.index = index; + + return test.repository.getBranchCommit("master"); + }) + .then(function(masterCommit) { + return masterCommit.getTree(); + }) + .then(function(tree) { + test.masterCommitTree = tree; + + return test.repository.getCommit(oid); + }) + 
.then(function(commit) { + test.commit = commit; + + return commit.getDiff(); + }) + .then(function(diff) { + test.diff = diff; + + return fse.writeFile(diffFilepath, "1 line\n2 line\n3 line\n\n4"); + }) + .then(function() { + return fse.move(moveFromPath, moveToPath); + }) + .then(function() { + return Diff.treeToWorkdirWithIndex( + test.repository, + test.masterCommitTree, + { flags: Diff.OPTION.INCLUDE_UNTRACKED } + ); + }) + .then(function(workdirDiff) { + test.workdirDiff = workdirDiff; + }) + .then(function() { + var opts = { + flags: Diff.OPTION.INCLUDE_UNTRACKED | + Diff.OPTION.RECURSE_UNTRACKED_DIRS + }; + + return Diff.indexToWorkdir(test.repository, test.index, opts); + }) + .then(function(diff) { + test.indexToWorkdirDiff = diff; + }) + .then(function() { + return fse.remove(diffFilepath); + }) + .then(function() { + return fse.move(moveToPath, moveFromPath); + }) + .catch(function(e) { + return fse.remove(diffFilepath) + .then(function() { + return Promise.reject(e); + }); + }); + }); + + it("can walk a DiffList", function() { + return this.diff[0].patches() + .then(function(patches) { + var patch = patches[0]; + + assert.equal(patch.oldFile().path(), "README.md"); + assert.equal(patch.newFile().path(), "README.md"); + assert.equal(patch.size(), 1); + assert.ok(patch.isModified()); + + return patch.hunks(); + }) + .then(function(hunks) { + var hunk = hunks[0]; + assert.equal(hunk.size(), 5); + + return hunk.lines(); + }) + .then(function(lines) { + assert.equal(lines[0].origin(), Diff.LINE.CONTEXT); + assert.equal(lines[1].origin(), Diff.LINE.CONTEXT); + assert.equal(lines[2].origin(), Diff.LINE.CONTEXT); + + var oldContent = "__Before submitting a pull request, please ensure " + + "both unit tests and lint checks pass.__\n"; + assert.equal(lines[3].content(), oldContent); + assert.equal(lines[3].origin(), Diff.LINE.DELETION); + assert.equal(lines[3].content().length, oldContent.length); + + var newContent = "__Before submitting a pull request, please 
ensure " + + "both that you've added unit tests to cover your shiny new code, " + + "and that all unit tests and lint checks pass.__\n"; + assert.equal(lines[4].content(), newContent); + assert.equal(lines[4].origin(), Diff.LINE.ADDITION); + assert.equal(lines[4].content().length, newContent.length); + }); + }); + + it("can diff the workdir with index", function() { + return this.workdirDiff.patches() + .then(function(patches) { + assert.equal(patches.length, 3); + assert(patches[2].isUntracked()); + + var oldFile = patches[2].oldFile(); + assert.equal(oldFile.path(), "wddiff.txt"); + assert.equal(oldFile.size(), 0); + + var newFile = patches[2].newFile(); + assert.equal(newFile.path(), "wddiff.txt"); + assert.equal(newFile.size(), 23); + }); + }); + + it("can resolve individual line changes from the patch hunks", function() { + return this.workdirDiff.patches() + .then(function(patches) { + var result = []; + var hunkPromises = []; + + patches.forEach(function(patch) { + hunkPromises.push(patch.hunks() + .then(function(hunks) { + result = result.concat(hunks); + }) + ); + }); + + return Promise.all(hunkPromises) + .then(function() { + return result; + }); + }) + .then(function(hunks) { + var result = []; + var linePromises = []; + + hunks.forEach(function(hunk) { + linePromises.push(hunk.lines() + .then(function(lines) { + result = result.concat(lines); + }) + ); + }); + + return Promise.all(linePromises) + .then(function() { + return result; + }); + }) + .then(function(lines) { + lines.forEach(function(line) { + assert(/\n/.exec(line.content())); + assert(/\n/.exec(line.rawContent())); + }); + }); + }); + + it("can diff the contents of a file to a string", function(done) { + this.repository.getBranchCommit("master") + .then(function(commit) { + return commit.getEntry("LICENSE"); + }) + .then(function(entry) { + var _entry = entry; + return _entry.getBlob(); + }) + .then(function(blob) { + var buffer = "New Text"; + return Diff.blobToBuffer( + blob, + null, + 
buffer, + null, + null, + null, + null, + function(delta, hunk, payload) { + assert.equal(hunk.oldStart(), 1); + assert.equal(hunk.oldLines(), 19); + assert.equal(hunk.newStart(), 1); + assert.equal(hunk.newLines(), 1); + assert.equal( + hunk.header().substring(0, hunk.headerLen() - 1), + "@@ -1,19 +1 @@" + ); + done(); + }); + }); + }); + + it("can diff the contents of a file to a string with unicode characters", + function(done) { + var evilString = "Unicode’s fun!\nAnd it’s good for you!\n"; + var buffer = new Buffer(evilString); + var oid = Blob.createFromBuffer(this.repository, buffer, buffer.length); + Blob.lookup(this.repository, oid) + .then(function(blob) { + blob.repo = this.repository; + return Diff.blobToBuffer( + blob, + null, + evilString, + null, + null, + null, + null, + function(delta, hunk, payload) { + assert.fail( + "There aren't any changes so this shouldn't be called."); + done(); + }); + }) + .then(function() { + done(); + }); + }); + + it("can diff with a null tree", function() { + var repo = this.repository; + var tree = this.masterCommitTree; + return Diff.treeToTree(repo, null, tree, null) + .then(function(diff) { + return diff.patches(); + }) + .then(function(patches) { + // Number of patches returned is 84 or 85 depending + // on something unknown at this time. Hopefully we can + // eventually resolve the root cause of the difference. 
+ // https://github.com/nodegit/nodegit/issues/746 + assert.ok(patches.length === 84 || patches.length === 85); + }); + }); + + it("can diff the initial commit of a repository", function() { + var repo = this.repository; + var oid = "99c88fd2ac9c5e385bd1fe119d89c83dce326219"; // First commit + return repo.getCommit(oid) + .then(function(commit) { + return commit.getDiff(); + }) + .then(function(diffs) { + return diffs[0].patches(); + }) + .then(function(patches) { + assert.equal(patches.length, 8); + }); + }); + + it("can diff tree to index", function() { + var repo = this.repository; + var tree = this.masterCommitTree; + var index = this.index; + var opts = { flags: Diff.OPTION.INCLUDE_UNTRACKED }; + + return Diff.treeToIndex(repo, tree, index, opts) + .then(function(diff) { + return diff.patches(); + }) + .then(function(patches) { + assert.equal(patches.length, 0); + }); + }); + + it("can diff index to workdir", function() { + return this.indexToWorkdirDiff.patches() + .then(function(patches) { + assert.equal(patches.length, 3); + }); + }); + + it("can pass undefined pathspec as option to indexToWorkdir", function() { + var test = this; + + return Repository.open(reposPath).then(function(repository) { + test.repository = repository; + + return repository.refreshIndex(); + }) + .then(function(index) { + test.index = index; + + return test.repository.getBranchCommit("master"); + }) + .then(function() { + var opts = { + flags: Diff.OPTION.INCLUDE_UNTRACKED | + Diff.OPTION.RECURSE_UNTRACKED_DIRS, + pathspec: undefined + }; + + // should not segfault + return Diff.indexToWorkdir(test.repository, test.index, opts); + }); + }); + + + it("can merge two commit diffs", function() { + var linesOfFirstDiff; + var linesOfSecondDiff; + var firstDiff = this.diff[0]; + var secondDiff; + var oid = "c88d39e70585199425b111c6a2c7fa7b4bc617ad"; + return this.repository.getCommit(oid) + .then(function(testCommit) { + return testCommit.getDiff(); + }) + .then(function(_secondDiff) { + 
secondDiff = _secondDiff[0]; + return Promise.all([ + getLinesFromDiff(firstDiff), + getLinesFromDiff(secondDiff) + ]); + }) + .then(function(listOfLines) { + linesOfFirstDiff = listOfLines[0]; + linesOfSecondDiff = listOfLines[1]; + return firstDiff.merge(secondDiff); + }) + .then(function() { + return getLinesFromDiff(firstDiff); + }) + .then(function(linesOfMergedDiff) { + var allDiffLines = _.flatten([ + linesOfFirstDiff, + linesOfSecondDiff + ]); + _.forEach(allDiffLines, function(diffLine) { + assert.ok(_.includes(linesOfMergedDiff, diffLine)); + }); + }); + }); + + describe( + "merge between commit diff and workdir and index diff", function() { + beforeEach(function() { + var test = this; + var diffOptions = new NodeGit.DiffOptions(); + var IGNORE_CASE_FLAG = 1 << 10; + diffOptions.flags = diffOptions.flags |= IGNORE_CASE_FLAG; + return fse.writeFile( + path.join(test.repository.workdir(), "newFile.txt"), "some line\n" + ) + .then(function() { + return test.index.addAll(undefined, undefined, function() { + // ensure that there is no deadlock if we call + // a sync libgit2 function from the callback + test.repository.path(); + + return 0; // confirm add + }); + }) + .then(function() { + return test.repository.getHeadCommit(); + }) + .then(function(headCommit) { + return headCommit.getTree(); + }) + .then(function(headTree) { + return Promise.all([ + Diff.treeToWorkdirWithIndex(test.repository, headTree, diffOptions), + test.commit.getDiffWithOptions(diffOptions) + ]); + }) + .then(function(diffs) { + test.workDirWithIndexDiff = diffs[0]; + // The second item in `diffs` is the commit diff which contains and + // array of diffs, one for each parent + test.commitDiff = diffs[1][0]; + }); + }); + + it("can merge a diff from a commit into a diff from a work dir and index", + function() { + var test = this; + var linesOfWorkDirWithIndexDiff; + var linesOfCommitDiff; + return Promise.all([ + getLinesFromDiff(test.workDirWithIndexDiff), + 
getLinesFromDiff(test.commitDiff) + ]) + .then(function(linesOfDiffs) { + linesOfWorkDirWithIndexDiff = linesOfDiffs[0]; + linesOfCommitDiff = linesOfDiffs[1]; + return test.workDirWithIndexDiff.merge(test.commitDiff); + }) + .then(function() { + return getLinesFromDiff(test.workDirWithIndexDiff); + }) + .then(function(linesOfMergedDiff) { + var allDiffLines = _.flatten([ + linesOfWorkDirWithIndexDiff, + linesOfCommitDiff + ]); + _.forEach(allDiffLines, function(diffLine) { + assert.ok(_.includes(linesOfMergedDiff, diffLine)); + }); + }); + }); + + it("can merge a diff from a workdir and index into a diff from a commit", + function() { + var test = this; + var linesOfWorkDirWithIndexDiff; + var linesOfCommitDiff; + return Promise.all([ + getLinesFromDiff(test.workDirWithIndexDiff), + getLinesFromDiff(test.commitDiff) + ]) + .then(function(linesOfDiffs) { + linesOfWorkDirWithIndexDiff = linesOfDiffs[0]; + linesOfCommitDiff = linesOfDiffs[1]; + return test.commitDiff.merge(test.workDirWithIndexDiff); + }) + .then(function() { + return getLinesFromDiff(test.commitDiff); + }) + .then(function(linesOfMergedDiff) { + var allDiffLines = _.flatten([ + linesOfWorkDirWithIndexDiff, + linesOfCommitDiff + ]); + _.forEach(allDiffLines, function(diffLine) { + assert.ok(_.includes(linesOfMergedDiff, diffLine)); + }); + }); + }); + }); + + // This wasn't working before. 
It was only passing because the promise chain + // was broken + it.skip("can find similar files in a diff", function() { + var diff = this.indexToWorkdirDiff; + var opts = { + flags: Diff.FIND.RENAMES | + Diff.FIND.RENAMES_FROM_REWRITES | + Diff.FIND.FOR_UNTRACKED + }; + + return diff.patches() + .then(function(patches) { + assert.equal(patches.length, 3); + + return diff.findSimilar(opts); + }) + .then(function() { + return diff.patches(); + }) + .then(function(patches) { + // Renamed file now treated as one diff, so 3 patches -> 2 + assert.equal(patches.length, 2); + }); + }); +}); diff --git a/test/tests/graph.js b/test/tests/graph.js new file mode 100644 index 000000000..34805cda0 --- /dev/null +++ b/test/tests/graph.js @@ -0,0 +1,64 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Graph", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Graph = NodeGit.Graph; + + var reposPath = local("../repos/workdir"); + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + }); + }); + + it("can get commits ahead/behind for 2 different commits", function() { + return Graph.aheadBehind( + this.repository, + "32789a79e71fbc9e04d3eff7425e1771eb595150", + "1729c73906bb8467f4095c2f4044083016b4dfde") + .then(function(result) { + assert.equal(result.ahead, 1); + assert.equal(result.behind, 1); + }); + }); + + it("can tell if a commit is a descendant of another", function() { + return Graph.descendantOf( + this.repository, + "32789a79e71fbc9e04d3eff7425e1771eb595150", + "e0aeedcff0584ebe00aed2c03c8ecd10839df908" + ) + .then(function(result) { + assert.equal(result, 1); + }); + }); + + it("can tell if a commit is not a descendant of another", function() { + return Graph.descendantOf( + this.repository, + "1528a019c504c9b5a68dc7d83bb2a887eb2473af", + 
"32789a79e71fbc9e04d3eff7425e1771eb595150" + ) + .then(function(result) { + assert.equal(result, 0); + }); + }); + + it("descendantOf will error if provided bad commits", function() { + return Graph.descendantOf( + this.repository, + "81b06facd90fe7a6e9bbd9cee59736a79105b7be", + "26744fc697849d370246749b67ac43b792a4af0c" + ) + .catch(function(result) { + assert(~result.message.indexOf("81b06fac")); + }); + }); +}); diff --git a/test/tests/ignore.js b/test/tests/ignore.js new file mode 100644 index 000000000..f834b28d0 --- /dev/null +++ b/test/tests/ignore.js @@ -0,0 +1,33 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Ignore", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Ignore = NodeGit.Ignore; + + var reposPath = local("../repos/workdir"); + + before(function() { + var test = this; + + return Repository.open(reposPath).then(function(repository) { + test.repository = repository; + }); + }); + + it("can determine if a path is ignored", function() { + function expectIgnoreState(repo, fileName, expected) { + return Ignore.pathIsIgnored(repo, fileName) + .then(function(ignored) { + assert.equal(ignored, expected); + }); + } + + return Promise.all([ + expectIgnoreState(this.repository, ".git", true), + expectIgnoreState(this.repository, "LICENSE", false) + ]); + }); +}); diff --git a/test/tests/index.js b/test/tests/index.js new file mode 100644 index 000000000..aba18fa60 --- /dev/null +++ b/test/tests/index.js @@ -0,0 +1,362 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); + +var writeFile = promisify(function(filename, data, callback) { + return require("fs").writeFile(filename, data, {}, callback); +}); + +describe("Index", function() { + var IndexUtils = require("../utils/index_setup"); + var 
RepoUtils = require("../utils/repository_setup"); + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + + var reposPath = local("../repos/workdir"); + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repo) { + test.repository = repo; + return repo.refreshIndex(); + }) + .then(function(index) { + test.index = index; + }); + }); + + after(function() { + this.index.clear(); + }); + + it("can get the index of a repo and examine entries", function() { + var entries = this.index.entries(); + + assert.equal(entries[0].path, ".gitignore"); + }); + + it("can add all entries to the index", function() { + var repo = this.repository; + var index = this.index; + var fileContent = { + newFile1: "this has some content", + newFile2: "and this will have more content" + }; + var fileNames = Object.keys(fileContent); + var test = this; + var addCallbacksCount = 0; + + return Promise.all(fileNames.map(function(fileName) { + return writeFile( + path.join(repo.workdir(), fileName), + fileContent[fileName]); + })) + .then(function() { + return index.addAll(undefined, undefined, function() { + addCallbacksCount++; + // ensure that there is no deadlock if we call + // a sync libgit2 function from the callback + test.repository.path(); + + return 0; // confirm add + }); + }) + .then(function() { + assert.equal(addCallbacksCount, 2); + + var newFiles = index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + + assert.equal(newFiles.length, 2); + }) + .then(function() { + return Promise.all(fileNames.map(function(fileName) { + return fse.remove(path.join(repo.workdir(), fileName)); + })); + }) + .then(function() { + return index.clear(); + }); + }); + + it("can remove entries from the index", function() { + var repo = this.repository; + var index = this.index; + var fileContent = { + newFile1: "this has some content", + newFile2: "and this will have more content", + differentFileName: 
"this has a different name and shouldn't be deleted" + }; + var fileNames = Object.keys(fileContent); + var removeCallbacksCount = 0; + + return Promise.all(fileNames.map(function(fileName) { + return writeFile( + path.join(repo.workdir(), fileName), + fileContent[fileName]); + })) + .then(function() { + return index.addAll(); + }) + .then(function() { + var newFiles = index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + + assert.equal(newFiles.length, 3); + + return index.removeAll("newFile*", function() { + removeCallbacksCount++; + + return 0; // confirm remove + }); + }) + .then(function() { + assert.equal(removeCallbacksCount, 2); + + var newFiles = index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + + assert.equal(newFiles.length, 1); + }) + .then(function() { + return Promise.all(fileNames.map(function(fileName) { + return fse.remove(path.join(repo.workdir(), fileName)); + })); + }) + .then(function() { + return index.clear(); + }); + }); + + it("can update entries in the index", function() { + var repo = this.repository; + var index = this.index; + var fileContent = { + newFile1: "this has some content", + newFile2: "and this will have more content" + }; + var fileNames = Object.keys(fileContent); + var updateCallbacksCount = 0; + + return Promise.all(fileNames.map(function(fileName) { + return writeFile( + path.join(repo.workdir(), fileName), + fileContent[fileName]); + })) + .then(function() { + return index.addAll(); + }) + .then(function() { + var newFiles = index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + + assert.equal(newFiles.length, 2); + + return fse.remove(path.join(repo.workdir(), fileNames[0])); + }) + .then(function() { + return index.updateAll("newFile*", function() { + updateCallbacksCount++; + + return 0; // confirm update + }); + }) + .then(function() { + assert.equal(updateCallbacksCount, 1); + + var newFiles = 
index.entries().filter(function(entry) { + return ~fileNames.indexOf(entry.path); + }); + + assert.equal(newFiles.length, 1); + return fse.remove(path.join(repo.workdir(), fileNames[1])); + }); + }); + + it("can get a conflict from the index", function() { + var fileName = "everyonesFile.txt"; + var rebaseReposPath = local("../repos/rebase"); + var ourBranchName = "ours"; + var theirBranchName = "theirs"; + + var baseFileContent = "How do you feel about Toll Roads?\n"; + var ourFileContent = "I like Toll Roads. I have an EZ-Pass!\n"; + var theirFileContent = "I'm skeptical about Toll Roads\n"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository; + var ourCommit; + var ourBranch; + var theirBranch; + + return Repository.init(rebaseReposPath, 0) + .then(function(repo) { + repository = repo; + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "044704f62399fecbe22da6d7d47b14e52625630e"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "80111c46ac73b857a3493b24c81df08639b5de99"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent + theirFileContent); + }) + .then(function() { + return 
RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "b826e989aca7647bea64810f0a2a38acbbdd4c1a"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "b3c355bb606ec7da87174dfa1a0b0c0e3dc97bc0"); + + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent + ourFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "e7fe41bf7c0c28766887a63ffe2f03f624276fbe"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "28cfeb17f66132edb3c4dacb7ff38e8dd48a1844"); + + var opts = { + checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE + }; + + return NodeGit.Checkout.head(repository, opts); + }) + .then(function() { + return repository.mergeBranches(ourBranchName, theirBranchName); + }) + .then(function(commit) { + assert.fail(commit, undefined, + "The index should have been thrown due to merge conflicts"); + }) + .catch(function(index) { + assert.ok(index); + assert.ok(index.hasConflicts()); + + return index.conflictGet(fileName); + }) + .then(function(conflict) { + var promises = []; + + promises.push(repository.getBlob(conflict.ancestor_out.id) + .then(function(blob) { + assert.equal(blob.toString(), baseFileContent); + })); + + promises.push(repository.getBlob(conflict.our_out.id) + .then(function(blob) { + assert.equal(blob.toString(), baseFileContent + ourFileContent); + })); + + promises.push(repository.getBlob(conflict.their_out.id) + .then(function(blob) { + assert.equal(blob.toString(), baseFileContent + theirFileContent); + })); + + return Promise.all(promises); + }); + }); + + it("can add a 
conflict to the index", function() { + var repo; + var repoPath = local("../repos/index"); + var ourBranchName = "ours"; + var theirBranchName = "theirs"; + var fileName = "testFile.txt"; + var ancestorIndexEntry; + var ourIndexEntry; + var theirIndexEntry; + + return RepoUtils.createRepository(repoPath) + .then(function(_repo) { + repo = _repo; + return IndexUtils.createConflict( + repo, + ourBranchName, + theirBranchName, + fileName + ); + }) + .then(function(index) { + assert.ok(index.hasConflicts()); + return index.conflictGet(fileName); + }) + .then(function(indexEntries) { + // Store all indexEntries for conflict + ancestorIndexEntry = indexEntries.ancestor_out; + ourIndexEntry = indexEntries.our_out; + theirIndexEntry = indexEntries.their_out; + + // Stage conflicted file + return RepoUtils.addFileToIndex(repo, fileName); + }) + .then(function() { + return repo.index(); + }) + .then(function(index) { + assert.ok(!index.hasConflicts()); + return index.conflictAdd( + ancestorIndexEntry, + ourIndexEntry, + theirIndexEntry + ); + }) + .then(function() { + return repo.index(); + }) + .then(function(index) { + assert(index.hasConflicts()); + }); + }); +}); diff --git a/test/tests/merge.js b/test/tests/merge.js new file mode 100644 index 000000000..3a63b0ced --- /dev/null +++ b/test/tests/merge.js @@ -0,0 +1,1301 @@ +var assert = require("assert"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var local = path.join.bind(path, __dirname); + +fse.ensureDir = promisify(fse.ensureDir); + +describe("Merge", function() { + var NodeGit = require("../../"); + var RepoUtils = require("../utils/repository_setup"); + + var reposPath = local("../repos/merge"); + var ourBranchName = "ours"; + var theirBranchName = "theirs"; + + beforeEach(function() { + var test = this; + return RepoUtils.createRepository(reposPath) + .then(function(repo) { + test.repository = repo; + }); + }); + + it("can cleanly merge 2 
files", function() { + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var ourFileContent = "I like Toll Roads. I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var theirCommit; + var ourBranch; + var theirBranch; + + return fse.writeFile(path.join(repository.workdir(), ourFileName), + ourFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(ourFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "11ead82b1135b8e240fb5d61e703312fb9cc3d6a"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "we made a commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "91a183f87842ebb7a9b08dad8bc2473985796844"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(theirFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + 
assert.equal(oid.toString(), + "76631cb5a290dafe2959152626bb90f2a6d8ec94"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); + }) + .then(function() { + return NodeGit.Merge.commits(repository, ourCommit, theirCommit); + }) + .then(function(index) { + assert(!index.hasConflicts()); + return index.writeTreeTo(repository); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "76631cb5a290dafe2959152626bb90f2a6d8ec94"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we merged their commit", oid, + [ourCommit, theirCommit]); + }) + .then(function(commitId) { + assert.equal(commitId.toString(), + "eedee554af34dd4001d8abc799cb55bb7e56a58b"); + }); + }); + + it("can fast-forward using the convenience method", function() { + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var theirCommit; + var ourBranch; + var theirBranch; + + return fse.writeFile( + path.join(repository.workdir(), ourFileName), + ourFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(ourFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "11ead82b1135b8e240fb5d61e703312fb9cc3d6a"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "we made a commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "91a183f87842ebb7a9b08dad8bc2473985796844"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(theirFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "76631cb5a290dafe2959152626bb90f2a6d8ec94"); + + return repository.createCommit(theirBranch.name(), theirSignature, + 
theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); + }) + .then(function() { + var opts = {checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE}; + return repository.checkoutBranch(ourBranchName, opts); + }) + .then(function() { + return repository.mergeBranches( + ourBranchName, + theirBranchName, + ourSignature); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + + return repository.getBranchCommit(ourBranchName) + .then(function(branchCommit) { + assert.equal(oid.toString(), branchCommit.toString()); + }); + }) + .then(function() { + return repository.getStatus(); + }) + .then(function(statuses) { + // make sure we didn't change the index + assert.equal(statuses.length, 0); + }); + }); + + it("can merge --no-ff a fast-forward using the convenience method", + function() { + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var theirCommit; + var ourBranch; + var theirBranch; + + return fse.writeFile( + path.join(repository.workdir(), ourFileName), + ourFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(ourFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "11ead82b1135b8e240fb5d61e703312fb9cc3d6a"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "we made a commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "91a183f87842ebb7a9b08dad8bc2473985796844"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(theirFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "76631cb5a290dafe2959152626bb90f2a6d8ec94"); + + return repository.createCommit(theirBranch.name(), theirSignature, + 
theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); + }) + .then(function() { + var opts = { + checkoutStrategy: NodeGit.Checkout.STRATEGY.SAFE | + NodeGit.Checkout.STRATEGY.RECREATE_MISSING + }; + return repository.checkoutBranch(ourBranchName, opts); + }) + .then(function() { + return repository.mergeBranches( + ourBranchName, + theirBranchName, + ourSignature, + NodeGit.Merge.PREFERENCE.NO_FASTFORWARD); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "6806d22d2b6c0095b29dc5ec51829caeb67861f1"); + + return repository.getBranchCommit(ourBranchName) + .then(function(branchCommit) { + assert.equal(oid.toString(), branchCommit.toString()); + }); + }) + .then(function() { + return repository.getStatus(); + }) + .then(function(statuses) { + // make sure we didn't change the index + assert.equal(statuses.length, 0); + }); + }); + + it("can merge --no-ff a non-fast-forward using the convenience method", + function() { + var initialFileName = "initialFile.txt"; + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var initialFileContent = "I'd like to drive somewhere"; + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var initialCommit; + var ourCommit; + var theirCommit; + var ourBranch; + var theirBranch; + + return fse.writeFile( + path.join(repository.workdir(), initialFileName), + initialFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(initialFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "21a553813e2f670815b649eef51eeadb253a5d0c"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "af66a9c799a10a23319ee4318c8bb2021521f539"); + + return repository.getCommit(commitOid).then(function(commit) { + initialCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), ourFileName), + ourFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(ourFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "af60aa06b3537f75b427f6268a130c842c84a137"); + + return 
repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "7ce31c05427659986d50abfb90c8f7db88ef4fa1"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }); + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(theirFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "f007361737a2ca00a0e80fc2daf55064463173b4"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "b588f0eef1809226f8f7db542940749da15ae1de"); + + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); + }) + .then(function() { + var opts = {checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE}; + return repository.checkoutBranch(ourBranchName, opts); + }) + .then(function() { + return repository.mergeBranches( + ourBranchName, + theirBranchName, + ourSignature, + NodeGit.Merge.PREFERENCE.NO_FASTFORWARD); + }) + .then(function(commitId) { + assert.equal(commitId.toString(), + "5384feb481d9c29081b3a0c1478fcc24a3953efa"); + }) + .then(function() { + return repository.getStatus(); + }) + .then(function(statuses) { + // make sure we didn't change the index + assert.equal(statuses.length, 0); + }); + }); + + it("can merge --ff-only a fast-forward using the convenience method", + function() { + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var theirCommit; + var ourBranch; + var theirBranch; + + return fse.writeFile( + path.join(repository.workdir(), ourFileName), + ourFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(ourFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "11ead82b1135b8e240fb5d61e703312fb9cc3d6a"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "we made a commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "91a183f87842ebb7a9b08dad8bc2473985796844"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(theirFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "76631cb5a290dafe2959152626bb90f2a6d8ec94"); + + return repository.createCommit(theirBranch.name(), theirSignature, + 
theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); + }) + .then(function() { + var opts = {checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE}; + return repository.checkoutBranch(ourBranchName, opts); + }) + .then(function() { + return repository.mergeBranches( + ourBranchName, + theirBranchName, + ourSignature, + NodeGit.Merge.PREFERENCE.FASTFORWARD_ONLY); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + + return repository.getBranchCommit(ourBranchName) + .then(function(branchCommit) { + assert.equal(oid.toString(), branchCommit.toString()); + }); + }) + .then(function() { + return repository.getStatus(); + }) + .then(function(statuses) { + // make sure we didn't change the index + assert.equal(statuses.length, 0); + }); + }); + + it("doesn't merge --ff-only a non-fast-forward using the convenience method", + function() { + var initialFileName = "initialFile.txt"; + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var initialFileContent = "I'd like to drive somewhere"; + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var initialCommit; + var ourCommit; + var theirCommit; + var ourBranch; + var theirBranch; + + return fse.writeFile( + path.join(repository.workdir(), initialFileName), + initialFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(initialFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "21a553813e2f670815b649eef51eeadb253a5d0c"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "af66a9c799a10a23319ee4318c8bb2021521f539"); + + return repository.getCommit(commitOid).then(function(commit) { + initialCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), ourFileName), + ourFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(ourFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "af60aa06b3537f75b427f6268a130c842c84a137"); + + return 
repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "7ce31c05427659986d50abfb90c8f7db88ef4fa1"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }); + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(theirFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "f007361737a2ca00a0e80fc2daf55064463173b4"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "b588f0eef1809226f8f7db542940749da15ae1de"); + + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); + }) + .then(function() { + var opts = {checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE}; + return repository.checkoutBranch(ourBranchName, opts); + }) + .then(function() { + return repository.mergeBranches( + ourBranchName, + theirBranchName, + ourSignature, + NodeGit.Merge.PREFERENCE.FASTFORWARD_ONLY); + }) + .then(function(commitId) { + assert.equal(commitId.toString(), + "7ce31c05427659986d50abfb90c8f7db88ef4fa1"); + }) + .then(function() { + return repository.getStatus(); + }) + .then(function(statuses) { + // make sure we didn't change the index + assert.equal(statuses.length, 0); + }); + }); + + it("can merge cleanly using the convenience method", function() { + var initialFileName = "initialFile.txt"; + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var initialFileContent = "I'd like to drive 
somewhere"; + var ourFileContent = "I like Toll Roads. I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var initialCommit; + var ourCommit; + var theirCommit; + var ourBranch; + var theirBranch; + + return fse.writeFile( + path.join(repository.workdir(), initialFileName), + initialFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(initialFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "21a553813e2f670815b649eef51eeadb253a5d0c"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "af66a9c799a10a23319ee4318c8bb2021521f539"); + + return repository.getCommit(commitOid).then(function(commit) { + initialCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), ourFileName), + ourFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(ourFileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + 
"af60aa06b3537f75b427f6268a130c842c84a137"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "7ce31c05427659986d50abfb90c8f7db88ef4fa1"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }); + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(theirFileName) + .then(function() { + return index.write(); + }) + .then(function(){ + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "f007361737a2ca00a0e80fc2daf55064463173b4"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "b588f0eef1809226f8f7db542940749da15ae1de"); + + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); + }) + .then(function() { + return repository.mergeBranches(ourBranchName, theirBranchName, + ourSignature); + }) + .then(function(commitId) { + assert.equal(commitId.toString(), + "5384feb481d9c29081b3a0c1478fcc24a3953efa"); + }); + }); + + it("can merge 2 branchs with conflicts on a single file", function () { + var baseFileContent = "All Bobs are created equal. 
ish.\n"; + var ourFileContent = "Big Bobs are best, IMHO.\n"; + var theirFileContent = "Nobody expects the small Bobquisition!\n"; + var finalFileContent = + "Big Bobs are beautiful, and the small are unexpected!\n"; + + var baseSignature = NodeGit.Signature.create + ("Peaceful Bob", "justchill@bob.net", 123456789, 60); + var ourSignature = NodeGit.Signature.create + ("Big Bob", "impressive@bob.net", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Small Bob", "underestimated@bob.net", 123456789, 60); + + var repository = this.repository; + var baseCommit; + var baseCommitOid; + var ourCommit; + var theirCommit; + var ourBranch; + var theirBranch; + var fileName = "newFile.txt"; + + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "ea2f6521fb8097a881f43796946ac1603e1f4d75"); + + return repository.createCommit("HEAD", baseSignature, + baseSignature, "bobs are all ok", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "a9b202f7612273fb3a68f718304298704eaeb735"); + baseCommitOid = commitOid; + + return repository.getCommit(commitOid).then(function(commit) { + baseCommit = commit; + }); + }) + .then(function() { + return repository.createBranch(ourBranchName, baseCommitOid) + .then(function(branch) { + ourBranch = branch; + }); + }) + .then(function() { + return repository.createBranch(theirBranchName, baseCommitOid) + .then(function(branch) { + theirBranch = branch; + }); + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), fileName), + ourFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return 
index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "c39b1e38b09085856cec7e7ff33e90f5a537d8a5"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "lol big bobs :yesway:", oid, [baseCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "935a89c09ad757a9dde2c0257f6f1e379f71816f"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }); + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), fileName), + theirFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "d1a894a9a4a8c820eb66c82cdd7e6b76c8f713cb"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "lol big bobs :poop:", oid, [baseCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "bebb9ec2e0684c7cb7c1e1601c7d5a8f52b8b123"); + + return repository.getCommit(commitOid).then(function(commit) { + theirCommit = commit; + }); + }) + .then(function() { + return NodeGit.Reference.lookup(repository, "HEAD") + .then(function(head) { + return head.symbolicSetTarget(ourBranch.name(), ""); + }); + }) + .then(function() { + return NodeGit.Merge.commits(repository, ourCommit, theirCommit, null); + }) + .then(function(index) { + assert(index.hasConflicts()); + fse.writeFileSync(path.join(repository.workdir(), fileName), + finalFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + 
return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "b1cd49a27cd33b99ab6dad2fb82b3174812a8b47"); + + return repository.createCommit(ourBranch.name(), baseSignature, + baseSignature, "Stop this bob sized fued", oid, + [ourCommit, theirCommit]); + }) + .then(function(commitId) { + assert.equal(commitId.toString(), + "49014ccabf5125f9b69316acde36f891dfdb8b5c"); + }); + }); + + it("leaves repo in MERGE state after a standard merge with conflicts fails", + function() { + var fileName = "everyonesFile.txt"; + + var baseFileContent = "How do you feel about Toll Roads?\n"; + var ourFileContent = "I like Toll Roads. I have an EZ-Pass!\n"; + var theirFileContent = "I'm skeptical about Toll Roads\n"; + + var expectedConflictedFileContent = + "How do you feel about Toll Roads?\n" + + "<<<<<<< HEAD\n" + + "I like Toll Roads. I have an EZ-Pass!\n" + + "=======\n" + + "I'm skeptical about Toll Roads\n" + + ">>>>>>> theirs\n"; + + var conflictSolvedFileContent = + "How do you feel about Toll Roads?\n" + + "He's skeptical about Toll Roads,\n" + + "but I like Toll Roads. 
I have an EZ-Pass!\n"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var initialCommit; + var ourBranch; + var theirBranch; + + var repoGitPath = repository.path(); + if (!~repoGitPath.indexOf("/.git")) { + repoGitPath = path.join(repoGitPath, ".git"); + } + + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "044704f62399fecbe22da6d7d47b14e52625630e"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "80111c46ac73b857a3493b24c81df08639b5de99"); + + return repository.getCommit(commitOid).then(function(commit) { + initialCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent + theirFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "b826e989aca7647bea64810f0a2a38acbbdd4c1a"); + + return repository.createCommit(theirBranch.name(), theirSignature, + 
theirSignature, "they made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "b3c355bb606ec7da87174dfa1a0b0c0e3dc97bc0"); + + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent + ourFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "e7fe41bf7c0c28766887a63ffe2f03f624276fbe"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "28cfeb17f66132edb3c4dacb7ff38e8dd48a1844"); + + //return repository.getCommit(commitOid) + + var opts = { + checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE + }; + + return repository.checkoutBranch(ourBranchName, opts); + }) + .then(function() { + return repository.getHeadCommit(); + }) + .then(function(commit) { + assert.equal(commit.id().toString(), + "28cfeb17f66132edb3c4dacb7ff38e8dd48a1844"); + + return repository.getReference(theirBranchName); + }) + .then(function(theirRef) { + return NodeGit.AnnotatedCommit.fromRef(repository, theirRef); + }) + .then(function(theirAnnotatedCommit) { + return NodeGit.Merge(repository, theirAnnotatedCommit); + }) + .then(function(result) { + assert.equal(result, 0); + + assert.equal(repository.state(), + NodeGit.Repository.STATE.MERGE); + // verify the convenience method + assert.ok(repository.isMerging()); + + assert.ok(fse.existsSync(path.join(repoGitPath, "MERGE_HEAD"))); + assert.ok(fse.existsSync(path.join(repoGitPath, "MERGE_MSG"))); + assert.ok(fse.existsSync(path.join(repoGitPath, "MERGE_MODE"))); + + return fse.readFile(path.join(repoGitPath, "MERGE_HEAD"), "utf-8"); + }) + 
.then(function(mergeHeadContents) { + assert.equal(mergeHeadContents, + "b3c355bb606ec7da87174dfa1a0b0c0e3dc97bc0\n"); + + return fse.readFile(path.join(repository.workdir(), fileName), "utf-8"); + }) + .then(function(fileContent) { + assert.equal(fileContent, expectedConflictedFileContent); + + return fse.writeFile(path.join(repository.workdir(), fileName), + conflictSolvedFileContent); + }) + .then(function() { + return repository.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + return Promise.all([ + repository.getBranchCommit(ourBranchName), + repository.getBranchCommit("MERGE_HEAD") + ]) + .then(function(commits) { + var msg = fse.readFileSync(path.join(repoGitPath, "MERGE_MSG"), + "utf-8"); + assert.ok(msg); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, msg, oid, [commits[0], commits[1]]); + }); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "03ba156a7a1660f179b6b2dbc6a542fcf88d022d"); + + // merge isn't cleaned up automatically + assert.ok(fse.existsSync(path.join(repoGitPath, "MERGE_HEAD"))); + assert.ok(fse.existsSync(path.join(repoGitPath, "MERGE_MSG"))); + assert.ok(fse.existsSync(path.join(repoGitPath, "MERGE_MODE"))); + + assert.equal(repository.stateCleanup(), 0); + + assert.ok(!fse.existsSync(path.join(repoGitPath, "MERGE_HEAD"))); + assert.ok(!fse.existsSync(path.join(repoGitPath, "MERGE_MSG"))); + assert.ok(!fse.existsSync(path.join(repoGitPath, "MERGE_MODE"))); + + assert.equal(repository.state(), + NodeGit.Repository.STATE.NONE); + // verify the convenience method + assert.ok(repository.isDefaultState()); + }); + }); +}); diff --git a/test/tests/note.js b/test/tests/note.js new file mode 100644 index 000000000..c240dcd09 --- /dev/null +++ b/test/tests/note.js @@ -0,0 +1,69 @@ +var assert = require("assert"); +var path 
= require("path"); +var local = path.join.bind(path, __dirname); + +describe("Note", function() { + var NodeGit = require("../../"); + var Note = NodeGit.Note; + var Signature = NodeGit.Signature; + var reposPath = local("../repos/workdir"); + + beforeEach(function() { + var test = this; + + return NodeGit.Repository.open(reposPath).then(function(repository) { + test.repository = repository; + + return repository.getMasterCommit().then(function(commit) { + test.commit = commit; + }); + }); + }); + + it("can be created", function() { + var sha = this.commit.id(); + var sig = Signature.create("John", "john@doe.com", Date.now(), 0); + var noteRef = "refs/notes/commits"; + + return Note.create(this.repository, noteRef, sig, sig, sha, "Testing!", 1); + }); + + it("can be read", function() { + var sha = this.commit.id(); + var noteRef = "refs/notes/commits"; + + return Note.read(this.repository, noteRef, sha).then(function(note) { + assert.equal(note.message(), "Testing!"); + }); + }); + + it("can iterate all notes", function() { + var test = this; + var noteRef = "refs/notes/commits"; + var ref = null; + + return Note.foreach(this.repository, noteRef, function(blobId, objectId) { + ref = objectId; + }).then(function() { + return NodeGit.Note.read(test.repository, noteRef, ref) + .then(function(note) { + assert.equal(note.message(), "Testing!"); + }); + }); + }); + + it("can be removed", function(done) { + var test = this; + var sha = this.commit.id(); + var noteRef = "refs/notes/commits"; + var sig = Signature.create("John", "john@doe.com", Date.now(), 0); + + return Note.remove(this.repository, noteRef, sig, sig, sha) + .then(function() { + return Note.read(test.repository, noteRef, sha).catch(function(ex) { + assert.equal(ex.message, "Note could not be found"); + done(); + }); + }); + }); +}); diff --git a/test/tests/odb.js b/test/tests/odb.js new file mode 100644 index 000000000..67a00c1bc --- /dev/null +++ b/test/tests/odb.js @@ -0,0 +1,61 @@ +var assert = 
require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Odb", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Oid = NodeGit.Oid; + var Obj = NodeGit.Object; + + var reposPath = local("../repos/workdir"); + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath).then(function(repo) { + test.repo = repo; + + return repo; + }).then(function(repo) { + return repo.odb(); + }).then(function(odb) { + test.odb = odb; + + return odb; + }); + }); + + it("can read raw objects directly from the odb using an OID", function() { + var oid = Oid.fromString("32789a79e71fbc9e04d3eff7425e1771eb595150"); + + return this.odb.read(oid) + .then(function (object) { + assert.equal(object.type(), Obj.TYPE.COMMIT); + }); + }); + + it("can read objects directly from the odb using a string", function() { + return this.odb.read("32789a79e71fbc9e04d3eff7425e1771eb595150") + .then(function (object) { + assert.equal(object.type(), Obj.TYPE.COMMIT); + }); + }); + + it("can write raw objects to git", function() { + var obj = "test data"; + var odb = this.odb; + + return odb.write(obj, obj.length, Obj.TYPE.BLOB) + .then(function(oid) { + assert.ok(oid instanceof Oid); + + return odb.read(oid); + }) + .then(function(object) { + assert.equal(object.type(), Obj.TYPE.BLOB); + assert.equal(object.toString(), obj); + assert.equal(object.size(), obj.length); + }); + }); +}); diff --git a/test/tests/oid.js b/test/tests/oid.js new file mode 100644 index 000000000..4c2fbcdad --- /dev/null +++ b/test/tests/oid.js @@ -0,0 +1,95 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +var leakTest = require("../utils/leak_test"); + +describe("Oid", function() { + var NodeGit = require("../../"); + var Oid = NodeGit.Oid; + + var oid = "fce88902e66c72b5b93e75bdb5ae717038b221f6"; + + before(function() { + this.oid = Oid.fromString(oid); + 
}); + + it("can convert a string to an oid", function() { + assert.ok(this.oid instanceof Oid); + }); + + it("can convert an oid to a string", function() { + var string = this.oid.allocfmt(); + + assert.equal(string, oid); + assert.equal(this.oid.toString(), oid); + }); + + it("provides a custom inspect method to improve debugging", function() { + var inspect = this.oid.inspect(); + + assert.equal(inspect, "[Oid " + oid + "]"); + }); + + it("can convert strings to oids in parameters", function() { + return NodeGit.Repository.open(local("../repos/workdir")) + .then(function(repo) { + var revwalk = repo.createRevWalk(); + revwalk.sorting(NodeGit.Revwalk.SORT.TIME); + + revwalk.push(oid); + + return revwalk.getCommits(1); + }) + .then(function(commits) { + assert.equal(commits[0].toString(), oid); + }); + }); + + it("can compare two identical oids", function() { + assert.equal(this.oid.cmp(this.oid), 0); + }); + + it("can compare two different oids", function() { + var oid2 = Oid.fromString("13c633665257696a3800b0a39ff636b4593f918f"); + assert.notEqual(this.oid.cmp(oid2), 0); + }); + + it("can compare the first chunk of two identical oids", function() { + assert.equal(this.oid.ncmp(this.oid, 5), 0); + }); + + it("can compare the first chunk of two different oids", function() { + var oid2 = Oid.fromString("13c633665257696a3800b0a39ff636b4593f918f"); + assert.notEqual(this.oid.ncmp(oid2, 5), 0); + }); + + it("can check the equality of two identical oids", function() { + assert(this.oid.equal(this.oid)); + }); + + it("can check the equality of two different oids", function() { + var oid2 = Oid.fromString("13c633665257696a3800b0a39ff636b4593f918f"); + assert(!this.oid.equal(oid2)); + }); + + it("does not leak constructed Oid", function() { + return leakTest(Oid, function() { + return Promise.resolve( + Oid.fromString("13c633665257696a3800b0a39ff636b4593f918f") + ); + }); + }); + + it("does not leak owned Oid", function() { + return leakTest(Oid, function() { + return 
NodeGit.Repository.open(local("../repos/workdir")) + .then(function(repo) { + return NodeGit.Commit.lookup(repo, oid); + }) + .then(function(commit) { + return commit.id(); + }); + }); + }); +}); diff --git a/test/tests/packbuilder.js b/test/tests/packbuilder.js new file mode 100644 index 000000000..cfd193238 --- /dev/null +++ b/test/tests/packbuilder.js @@ -0,0 +1,25 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Packbuilder", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Packbuilder = NodeGit.Packbuilder; + + var reposPath = local("../repos/workdir"); + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath).then(function(repository) { + test.repository = repository; + }); + }); + + it("can be initialized", function() { + var packBuilder = Packbuilder.create(this.repository); + + assert(packBuilder instanceof Packbuilder); + }); +}); diff --git a/test/tests/patch.js b/test/tests/patch.js new file mode 100644 index 000000000..e426d2b63 --- /dev/null +++ b/test/tests/patch.js @@ -0,0 +1,66 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Patch", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + + var reposPath = local("../repos/workdir"); + var oid = "fce88902e66c72b5b93e75bdb5ae717038b221f6"; + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath).then(function(repository) { + test.repository = repository; + + return repository.refreshIndex(); + }) + .then(function(index) { + test.index = index; + + return test.repository.getBranchCommit("master"); + }) + .then(function(masterCommit) { + return masterCommit.getTree(); + }) + .then(function(tree) { + test.masterCommitTree = tree; + + return test.repository.getCommit(oid); + }) + .then(function(commit) { + test.commit = commit; 
+ + return commit.getDiff(); + }) + .then(function(diff) { + test.diff = diff; + + return diff[0].patches(); + }) + .catch(function(e) { + return Promise.reject(e); + }); + }); + + it("retrieve the line stats of a patch", function() { + return this.diff[0].patches() + .then(function(patches) { + var patch = patches[0]; + var lineStats = patch.lineStats(); + + assert.equal(patch.oldFile().path(), "README.md"); + assert.equal(patch.newFile().path(), "README.md"); + assert.equal(patch.size(), 1); + assert.ok(patch.isModified()); + assert.equal(lineStats.total_context, 3); + assert.equal(lineStats.total_additions, 1); + assert.equal(lineStats.total_deletions, 1); + }); + + }); + + +}); diff --git a/test/tests/pathspec.js b/test/tests/pathspec.js new file mode 100644 index 000000000..28741b068 --- /dev/null +++ b/test/tests/pathspec.js @@ -0,0 +1,44 @@ +var assert = require("assert"); + +describe("Pathspec", function() { + var NodeGit = require("../../"); + var Pathspec = NodeGit.Pathspec; + + it("can accept just about anything against a * pathspec", function() { + var pathspec = Pathspec.create("*"); + + assert.equal(pathspec.matchesPath(0, "burritoooo"), 1); + assert.equal(pathspec.matchesPath(0, "bob/ted/yoghurt.mp3"), 1); + }); + + it("can take a * in an array", function() { + var pathspec = Pathspec.create("*"); + + assert.equal(pathspec.matchesPath(0, "burritoooo"), 1); + assert.equal(pathspec.matchesPath(0, "bob/ted/yoghurt.mp3"), 1); + }); + + it("can take a single file", function() { + var pathspec = Pathspec.create(["myDir/burritoSupreme.mp4"]); + + assert.equal(pathspec.matchesPath(0, "myDir/burritoSupreme.mp4"), 1); + assert.equal(pathspec.matchesPath(0, "bob/ted/yoghurt.mp3"), 0); + }); + + it("can take files in an array", function() { + var pathspec = Pathspec.create(["gwendoline.txt", "sausolito.ogg"]); + + assert.equal(pathspec.matchesPath(0, "gwendoline.txt"), 1); + assert.equal(pathspec.matchesPath(0, "sausolito.ogg"), 1); + 
assert.equal(pathspec.matchesPath(0, "sausolito.txt"), 0); + }); + + it("can handle dirs", function() { + var pathspec = Pathspec.create(["myDir/", "bob.js"]); + + assert.equal(pathspec.matchesPath(0, "bob.js"), 1); + assert.equal(pathspec.matchesPath(0, "myDir/bob2.js"), 1); + assert.equal(pathspec.matchesPath(0, "bob2.js"), 0); + assert.equal(pathspec.matchesPath(0, "herDir/bob.js"), 0); + }); +}); diff --git a/test/tests/rebase.js b/test/tests/rebase.js new file mode 100644 index 000000000..f40d73621 --- /dev/null +++ b/test/tests/rebase.js @@ -0,0 +1,1076 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); + +describe("Rebase", function() { + var NodeGit = require("../../"); + var RepoUtils = require("../utils/repository_setup"); + + var repoPath = local("../repos/rebase"); + var ourBranchName = "ours"; + var theirBranchName = "theirs"; + + var removeFileFromIndex = function(repository, fileName) { + return repository.refreshIndex() + .then(function(index) { + return index.removeByPath(fileName) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }); + }; + + beforeEach(function() { + var test = this; + return RepoUtils.createRepository(repoPath) + .then(function(repo) { + test.repository = repo; + }); + }); + + after(function() { + return fse.remove(repoPath); + }); + + it("can cleanly fast-forward via rebase", function() { + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var theirBranch; + + return fse.writeFile(path.join(repository.workdir(), ourFileName), + ourFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return RepoUtils.addFileToIndex(repository, ourFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "11ead82b1135b8e240fb5d61e703312fb9cc3d6a"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "we made a commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "91a183f87842ebb7a9b08dad8bc2473985796844"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, theirFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "76631cb5a290dafe2959152626bb90f2a6d8ec94"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + }) + .then(function() { + // unstage changes so that we can begin a rebase + return removeFileFromIndex(repository, theirFileName); + }) + .then(function() { + return Promise.all([ + 
repository.getReference(ourBranchName), + repository.getReference(theirBranchName) + ]); + }) + .then(function(refs) { + assert.equal(refs.length, 2); + + return Promise.all([ + NodeGit.AnnotatedCommit.fromRef(repository, refs[0]), + NodeGit.AnnotatedCommit.fromRef(repository, refs[1]) + ]); + }) + .then(function(annotatedCommits) { + assert.equal(annotatedCommits.length, 2); + + var ourAnnotatedCommit = annotatedCommits[0]; + var theirAnnotatedCommit = annotatedCommits[1]; + + assert.equal(ourAnnotatedCommit.id().toString(), + "91a183f87842ebb7a9b08dad8bc2473985796844"); + assert.equal(theirAnnotatedCommit.id().toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + + return NodeGit.Rebase.init(repository, ourAnnotatedCommit, + theirAnnotatedCommit, theirAnnotatedCommit); + }) + .then(function(rebase) { + assert.equal(rebase.operationEntrycount(), 0); + + return rebase.finish(ourSignature); + }) + .then(function() { + return repository.getBranchCommit(ourBranchName); + }) + .then(function(commit) { + assert.equal(commit.id().toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + }); + }); + + it("can cleanly rebase a branch onto another branch", function() { + var baseFileName = "baseNewFile.txt"; + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var baseFileContent = "How do you feel about Toll Roads?"; + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var ourBranch; + var theirBranch; + var rebase; + + return fse.writeFile(path.join(repository.workdir(), baseFileName), + baseFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return RepoUtils.addFileToIndex(repository, baseFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "b5cdc109d437c4541a13fb7509116b5f03d5039a"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "be03abdf0353d05924c53bebeb0e5bb129cda44a"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, theirFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "be5f0fd38a39a67135ad68921c93cd5c17fefb3d"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "e9ebd92f2f4778baf6fa8e92f0c68642f931a554"); + + return removeFileFromIndex(repository, theirFileName); + }) + .then(function() { + return fse.remove(path.join(repository.workdir(), 
theirFileName)); + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), ourFileName), + ourFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, ourFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "77867fc0bfeb3f80ab18a78c8d53aa3a06207047"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "e7f37ee070837052937e24ad8ba66f6d83ae7941"); + + return removeFileFromIndex(repository, ourFileName); + }) + .then(function() { + return fse.remove(path.join(repository.workdir(), ourFileName)); + }) + .then(function() { + return repository.checkoutBranch(ourBranchName); + }) + .then(function() { + return Promise.all([ + repository.getReference(ourBranchName), + repository.getReference(theirBranchName) + ]); + }) + .then(function(refs) { + assert.equal(refs.length, 2); + + return Promise.all([ + NodeGit.AnnotatedCommit.fromRef(repository, refs[0]), + NodeGit.AnnotatedCommit.fromRef(repository, refs[1]) + ]); + }) + .then(function(annotatedCommits) { + assert.equal(annotatedCommits.length, 2); + + var ourAnnotatedCommit = annotatedCommits[0]; + var theirAnnotatedCommit = annotatedCommits[1]; + + assert.equal(ourAnnotatedCommit.id().toString(), + "e7f37ee070837052937e24ad8ba66f6d83ae7941"); + assert.equal(theirAnnotatedCommit.id().toString(), + "e9ebd92f2f4778baf6fa8e92f0c68642f931a554"); + + return NodeGit.Rebase.init(repository, ourAnnotatedCommit, + theirAnnotatedCommit, null); + }) + .then(function(newRebase) { + rebase = newRebase; + + // there should only be 1 rebase operation to perform + assert.equal(rebase.operationEntrycount(), 1); + + return rebase.next(); + }) + .then(function(rebaseOperation) { + assert.equal(rebaseOperation.type(), + NodeGit.RebaseOperation.REBASE_OPERATION.PICK); + assert.equal(rebaseOperation.id().toString(), + 
"e7f37ee070837052937e24ad8ba66f6d83ae7941"); + + return rebase.commit(null, ourSignature); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "b937100ee0ea17ef20525306763505a7fe2be29e"); + + // git_rebase_operation_current returns the index of the rebase + // operation that was last applied, so after the first operation, it + // should be 0. + assert.equal(rebase.operationCurrent(), 0); + + return rebase.finish(ourSignature, {}); + }) + .then(function(result) { + assert.equal(result, 0); + + return repository.getBranchCommit(ourBranchName); + }) + .then(function(commit) { + // verify that the "ours" branch has moved to the correct place + assert.equal(commit.id().toString(), + "b937100ee0ea17ef20525306763505a7fe2be29e"); + + return commit.parent(0); + }) + .then(function(commit) { + // verify that we are on top of "their commit" + assert.equal(commit.id().toString(), + "e9ebd92f2f4778baf6fa8e92f0c68642f931a554"); + }); + }); + + it("can rebase 2 branches with conflicts on a single file", function() { + var fileName = "everyonesFile.txt"; + + var baseFileContent = "How do you feel about Toll Roads?\n"; + var ourFileContent = "I like Toll Roads. I have an EZ-Pass!\n"; + var theirFileContent = "I'm skeptical about Toll Roads\n"; + + var expectedConflictedFileContent = + "How do you feel about Toll Roads?\n" + + "<<<<<<< theirs\n" + + "I'm skeptical about Toll Roads\n" + + "=======\n" + + "I like Toll Roads. I have an EZ-Pass!\n" + + ">>>>>>> we made a commit\n"; + + var conflictSolvedFileContent = + "How do you feel about Toll Roads?\n" + + "He's skeptical about Toll Roads,\n" + + "but I like Toll Roads. 
I have an EZ-Pass!\n"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var ourBranch; + var theirBranch; + var rebase; + + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "044704f62399fecbe22da6d7d47b14e52625630e"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "80111c46ac73b857a3493b24c81df08639b5de99"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent + theirFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "b826e989aca7647bea64810f0a2a38acbbdd4c1a"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "b3c355bb606ec7da87174dfa1a0b0c0e3dc97bc0"); + + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent + ourFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + 
"e7fe41bf7c0c28766887a63ffe2f03f624276fbe"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "28cfeb17f66132edb3c4dacb7ff38e8dd48a1844"); + + var opts = { + checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE + }; + + return NodeGit.Checkout.head(repository, opts); + }) + .then(function() { + return Promise.all([ + repository.getReference(ourBranchName), + repository.getReference(theirBranchName) + ]); + }) + .then(function(refs) { + assert.equal(refs.length, 2); + + return Promise.all([ + NodeGit.AnnotatedCommit.fromRef(repository, refs[0]), + NodeGit.AnnotatedCommit.fromRef(repository, refs[1]) + ]); + }) + .then(function(annotatedCommits) { + assert.equal(annotatedCommits.length, 2); + + var ourAnnotatedCommit = annotatedCommits[0]; + var theirAnnotatedCommit = annotatedCommits[1]; + + assert.equal(ourAnnotatedCommit.id().toString(), + "28cfeb17f66132edb3c4dacb7ff38e8dd48a1844"); + assert.equal(theirAnnotatedCommit.id().toString(), + "b3c355bb606ec7da87174dfa1a0b0c0e3dc97bc0"); + + return NodeGit.Rebase.init(repository, ourAnnotatedCommit, + theirAnnotatedCommit, null); + }) + .then(function(newRebase) { + rebase = newRebase; + + // there should only be 1 rebase operation to perform + assert.equal(rebase.operationEntrycount(), 1); + + return rebase.next(); + }) + .then(function(rebaseOperation) { + assert.equal(rebaseOperation.type(), + NodeGit.RebaseOperation.REBASE_OPERATION.PICK); + assert.equal(rebaseOperation.id().toString(), + "28cfeb17f66132edb3c4dacb7ff38e8dd48a1844"); + + return repository.refreshIndex() + .then(function(index) { + assert.ok(index.hasConflicts()); + }); + }) + .then(function() { + return fse.readFile(path.join(repository.workdir(), fileName), "utf8") + .then(function(fileContent) { + assert.equal(fileContent, expectedConflictedFileContent); + + return 
fse.writeFile(path.join(repository.workdir(), fileName), + conflictSolvedFileContent); + }); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + return repository.refreshIndex() + .then(function(index) { + assert.ok(!index.hasConflicts()); + + return rebase.commit(null, ourSignature); + }); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "ef6d0e95167435b3d58f51ab165948c72f6f94b6"); + + return rebase.finish(ourSignature); + }) + .then(function(result) { + assert.equal(result, 0); + + return repository.getBranchCommit(ourBranchName); + }) + .then(function(commit) { + // verify that the "ours" branch has moved to the correct place + assert.equal(commit.id().toString(), + "ef6d0e95167435b3d58f51ab165948c72f6f94b6"); + + return commit.parent(0); + }) + .then(function(commit) { + // verify that we are on top of "their commit" + assert.equal(commit.id().toString(), + "b3c355bb606ec7da87174dfa1a0b0c0e3dc97bc0"); + }); + }); + + it("can abort an in-progress rebase", function() { + var baseFileName = "baseNewFile.txt"; + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var baseFileContent = "How do you feel about Toll Roads?"; + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var ourBranch; + var theirBranch; + var rebase; + + return fse.writeFile(path.join(repository.workdir(), baseFileName), + baseFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return RepoUtils.addFileToIndex(repository, baseFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "b5cdc109d437c4541a13fb7509116b5f03d5039a"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "be03abdf0353d05924c53bebeb0e5bb129cda44a"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, theirFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "be5f0fd38a39a67135ad68921c93cd5c17fefb3d"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "e9ebd92f2f4778baf6fa8e92f0c68642f931a554"); + + return removeFileFromIndex(repository, theirFileName); + }) + .then(function() { + return fse.remove(path.join(repository.workdir(), 
theirFileName)); + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), ourFileName), + ourFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, ourFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "77867fc0bfeb3f80ab18a78c8d53aa3a06207047"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "e7f37ee070837052937e24ad8ba66f6d83ae7941"); + + return removeFileFromIndex(repository, ourFileName); + }) + .then(function() { + return fse.remove(path.join(repository.workdir(), ourFileName)); + }) + .then(function() { + return repository.checkoutBranch(ourBranchName); + }) + .then(function() { + return Promise.all([ + repository.getReference(ourBranchName), + repository.getReference(theirBranchName) + ]); + }) + .then(function(refs) { + assert.equal(refs.length, 2); + + return Promise.all([ + NodeGit.AnnotatedCommit.fromRef(repository, refs[0]), + NodeGit.AnnotatedCommit.fromRef(repository, refs[1]) + ]); + }) + .then(function(annotatedCommits) { + assert.equal(annotatedCommits.length, 2); + + var ourAnnotatedCommit = annotatedCommits[0]; + var theirAnnotatedCommit = annotatedCommits[1]; + + assert.equal(ourAnnotatedCommit.id().toString(), + "e7f37ee070837052937e24ad8ba66f6d83ae7941"); + assert.equal(theirAnnotatedCommit.id().toString(), + "e9ebd92f2f4778baf6fa8e92f0c68642f931a554"); + + return NodeGit.Rebase.init(repository, ourAnnotatedCommit, + theirAnnotatedCommit, null); + }) + .then(function(newRebase) { + rebase = newRebase; + + // there should only be 1 rebase operation to perform + assert.equal(rebase.operationEntrycount(), 1); + + return rebase.next(); + }) + .then(function(rebaseOperation) { + assert.equal(rebaseOperation.type(), + NodeGit.RebaseOperation.REBASE_OPERATION.PICK); + assert.equal(rebaseOperation.id().toString(), + 
"e7f37ee070837052937e24ad8ba66f6d83ae7941"); + + return rebase.commit(null, ourSignature); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "b937100ee0ea17ef20525306763505a7fe2be29e"); + + return repository.getBranchCommit("HEAD") + .then(function(commit) { + // verify that HEAD is on the rebased commit + assert.equal(commit.id().toString(), commitOid.toString()); + }); + }) + .then(function() { + return rebase.abort(ourSignature); + }) + .then(function() { + return NodeGit.Rebase.open(repository) + .then(function(existingRebase) { + assert.fail(existingRebase, undefined, + "There should not be a rebase in progress"); + }) + .catch(function(e) { + assert.equal(e.message, "There is no rebase in progress"); + }); + }) + .then(function() { + return Promise.all([ + repository.getBranchCommit("HEAD"), + repository.getBranchCommit(ourBranchName) + ]); + }) + .then(function(commits) { + assert.equal(commits.length, 2); + + // verify that 'HEAD' and 'ours' are back to their pre-rebase state + assert.equal(commits[0].id().toString(), + "e7f37ee070837052937e24ad8ba66f6d83ae7941"); + assert.equal(commits[1].id().toString(), + "e7f37ee070837052937e24ad8ba66f6d83ae7941"); + }); + }); + + it("can fast-forward via rebase using the convenience methods", + function() { + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var theirBranch; + + return fse.writeFile(path.join(repository.workdir(), ourFileName), + ourFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return RepoUtils.addFileToIndex(repository, ourFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "11ead82b1135b8e240fb5d61e703312fb9cc3d6a"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "we made a commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "91a183f87842ebb7a9b08dad8bc2473985796844"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, theirFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "76631cb5a290dafe2959152626bb90f2a6d8ec94"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + }) + .then(function() { + // unstage changes so that we can begin a rebase + return removeFileFromIndex(repository, theirFileName); + }) + .then(function() { + return Promise.all([ + 
repository.getReference(ourBranchName), + repository.getReference(theirBranchName) + ]); + }) + .then(function(refs) { + assert.equal(refs.length, 2); + + return Promise.all([ + NodeGit.AnnotatedCommit.fromRef(repository, refs[0]), + NodeGit.AnnotatedCommit.fromRef(repository, refs[1]) + ]); + }) + .then(function(annotatedCommits) { + assert.equal(annotatedCommits.length, 2); + + var ourAnnotatedCommit = annotatedCommits[0]; + var theirAnnotatedCommit = annotatedCommits[1]; + + assert.equal(ourAnnotatedCommit.id().toString(), + "91a183f87842ebb7a9b08dad8bc2473985796844"); + assert.equal(theirAnnotatedCommit.id().toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + + return fse.remove(path.join(repository.workdir(), theirFileName)); + }) + .then(function() { + return repository.rebaseBranches(ourBranchName, theirBranchName, null, + ourSignature); + }) + .then(function(commit) { + assert.equal(commit.id().toString(), + "0e9231d489b3f4303635fc4b0397830da095e7e7"); + }); + }); + + it("can rebase using the convenience method", function() { + var baseFileName = "baseNewFile.txt"; + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var baseFileContent = "How do you feel about Toll Roads?"; + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var ourBranch; + var theirBranch; + + var nextCalls = 0; + + return fse.writeFile(path.join(repository.workdir(), baseFileName), + baseFileContent) + // Load up the repository index and make our initial commit to HEAD + .then(function() { + return RepoUtils.addFileToIndex(repository, baseFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "b5cdc109d437c4541a13fb7509116b5f03d5039a"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "be03abdf0353d05924c53bebeb0e5bb129cda44a"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), theirFileName), + theirFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, theirFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "be5f0fd38a39a67135ad68921c93cd5c17fefb3d"); + + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "e9ebd92f2f4778baf6fa8e92f0c68642f931a554"); + + return removeFileFromIndex(repository, theirFileName); + }) + .then(function() { + return 
fse.remove(path.join(repository.workdir(), theirFileName)); + }) + .then(function() { + return fse.writeFile(path.join(repository.workdir(), ourFileName), + ourFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, ourFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "77867fc0bfeb3f80ab18a78c8d53aa3a06207047"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "e7f37ee070837052937e24ad8ba66f6d83ae7941"); + + return removeFileFromIndex(repository, ourFileName); + }) + .then(function() { + return fse.remove(path.join(repository.workdir(), ourFileName)); + }) + .then(function() { + return repository.checkoutBranch(ourBranchName); + }) + .then(function() { + return repository.rebaseBranches(ourBranchName, theirBranchName, + null, ourSignature, function(rebase) { + assert.ok(rebase instanceof NodeGit.Rebase); + + nextCalls++; + + return Promise.resolve(); + }); + }) + .then(function(commit) { + // verify that the beforeNextFn callback was called + assert.equal(nextCalls, 2); + + // verify that the "ours" branch has moved to the correct place + assert.equal(commit.id().toString(), + "b937100ee0ea17ef20525306763505a7fe2be29e"); + + return commit.parent(0); + }) + .then(function(commit) { + // verify that we are on top of "their commit" + assert.equal(commit.id().toString(), + "e9ebd92f2f4778baf6fa8e92f0c68642f931a554"); + }); + }); + + it("can rebase with conflicts using the convenience methods", function() { + var fileName = "everyonesFile.txt"; + + var baseFileContent = "How do you feel about Toll Roads?\n"; + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!\n"; + var theirFileContent = "I'm skeptical about Toll Roads\n"; + + var expectedConflictedFileContent = + "How do you feel about Toll Roads?\n" + + "<<<<<<< theirs\n" + + "I'm skeptical about Toll Roads\n" + + "=======\n" + + "I like Toll Roads. I have an EZ-Pass!\n" + + ">>>>>>> we made a commit\n"; + + var conflictSolvedFileContent = + "How do you feel about Toll Roads?\n" + + "He's skeptical about Toll Roads,\n" + + "but I like Toll Roads. I have an EZ-Pass!\n"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var repository = this.repository; + var ourCommit; + var ourBranch; + var theirBranch; + var nextCalls=0; + + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "044704f62399fecbe22da6d7d47b14e52625630e"); + + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "80111c46ac73b857a3493b24c81df08639b5de99"); + + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent + theirFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "b826e989aca7647bea64810f0a2a38acbbdd4c1a"); + + return 
repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "b3c355bb606ec7da87174dfa1a0b0c0e3dc97bc0"); + + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent + ourFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "e7fe41bf7c0c28766887a63ffe2f03f624276fbe"); + + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "28cfeb17f66132edb3c4dacb7ff38e8dd48a1844"); + + var opts = { + checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE + }; + + return NodeGit.Checkout.head(repository, opts); + }) + .then(function() { + return repository.rebaseBranches(ourBranchName, theirBranchName, + null, ourSignature) + .then(function(commit) { + assert.fail(commit, undefined, + "The index should have been thrown due to merge conflicts"); + }) + .catch(function(index) { + assert.ok(index); + assert.ok(index.hasConflicts()); + + assert.ok(repository.isRebasing()); + }); + }) + .then(function() { + return fse.readFile(path.join(repository.workdir(), fileName), "utf8") + .then(function(fileContent) { + assert.equal(fileContent, expectedConflictedFileContent); + + return fse.writeFile(path.join(repository.workdir(), fileName), + conflictSolvedFileContent); + }); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + return repository.refreshIndex() + .then(function(index) { + assert.ok(!index.hasConflicts()); + + return repository.continueRebase(ourSignature, function(rebase) { + assert.ok(rebase instanceof NodeGit.Rebase); + + nextCalls++; + + return Promise.resolve(); + }); + }); + }) + .then(function(commit) { + // verify that 
the beforeNextFn callback was called + assert.equal(nextCalls, 1); + + // verify that the "ours" branch has moved to the correct place + assert.equal(commit.id().toString(), + "ef6d0e95167435b3d58f51ab165948c72f6f94b6"); + + assert.ok(!repository.isRebasing()); + assert.ok(repository.isDefaultState()); + + return commit.parent(0); + }) + .then(function(commit) { + // verify that we are on top of "their commit" + assert.equal(commit.id().toString(), + "b3c355bb606ec7da87174dfa1a0b0c0e3dc97bc0"); + }); + }); +}); diff --git a/test/tests/refs.js b/test/tests/refs.js new file mode 100644 index 000000000..c2b148cc5 --- /dev/null +++ b/test/tests/refs.js @@ -0,0 +1,78 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +var exec = require("../../utils/execPromise"); + +describe("Reference", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Reference = NodeGit.Reference; + + var reposPath = local("../repos/workdir"); + var refName = "refs/heads/master"; + + before(function() { + var test = this; + + return exec("git reset --hard origin/master", {cwd: reposPath}) + .then(function() { + return Repository.open(reposPath); + }) + .then(function(repository) { + test.repository = repository; + + return repository.getReference(refName); + }) + .then(function(reference) { + test.reference = reference; + }); + }); + + it("can look up a reference", function() { + assert.ok(this.reference instanceof Reference); + }); + + it("can determine if the reference is symbolic", function() { + assert.equal(this.reference.isSymbolic(), false); + }); + + it("can determine if the reference is not symbolic", function() { + assert.ok(this.reference.isConcrete()); + }); + + it("can check that reference is valid", function() { + assert.ok(this.reference.isValid()); + }); + + it("can return refName when casting toString", function() { + assert.equal(this.reference.toString(), refName); + }); + + 
it("can compare two identical references", function() { + assert.equal(this.reference.cmp(this.reference), 0); + }); + + it("can compare two different references", function() { + var ref = this.reference; + + return this.repository.getReference("checkout-test") + .then(function(otherRef) { + assert.notEqual(ref.cmp(otherRef), 0); + }); + }); + + it("will return undefined looking up the symbolic target if not symbolic", + function() { + assert(this.reference.symbolicTarget() === undefined); + }); + + it("can look up the HEAD sha", function() { + return Reference.nameToId(this.repository, "HEAD") + .then(function(oid) { + var sha = oid.allocfmt(); + assert.equal(sha, "32789a79e71fbc9e04d3eff7425e1771eb595150"); + }); + }); + +}); diff --git a/test/tests/remote.js b/test/tests/remote.js new file mode 100644 index 000000000..165cc8214 --- /dev/null +++ b/test/tests/remote.js @@ -0,0 +1,444 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +var garbageCollect = require("../utils/garbage_collect.js"); + +describe("Remote", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Remote = NodeGit.Remote; + + var reposPath = local("../repos/workdir"); + var url = "https://github.com/nodegit/test"; + var url2 = "https://github.com/nodegit/test2"; + var privateUrl = "git@github.com:nodegit/private"; + + function removeNonOrigins(repo) { + return repo.getRemotes() + .then(function(remotes) { + return remotes.reduce(function(promise, remote) { + if (remote !== "origin") { + promise = promise.then(function() { + return Remote.delete(repo, remote); + }); + } + + return promise; + }, Promise.resolve()); + }); + } + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + + return Remote.lookup(repository, "origin"); + }) + .then(function(remote) { + test.remote = remote; + + return 
removeNonOrigins(test.repository); + }); + }); + + after(function() { + return removeNonOrigins(this.repository); + }); + + it("can load a remote", function() { + assert.ok(this.remote instanceof Remote); + }); + + it("can read the remote url", function() { + assert.equal(this.remote.url().replace(".git", ""), url); + }); + + it("has an empty pushurl by default", function() { + assert.equal(this.remote.pushurl(), undefined); + }); + + it("can set a remote", function() { + var repository = this.repository; + + return Remote.create(repository, "origin1", url) + .then(function() { + return Remote.setPushurl(repository, "origin1", "https://google.com/"); + }) + .then(function() { + return Remote.lookup(repository, "origin1"); + }) + .then(function(remote) { + assert.equal(remote.pushurl(), "https://google.com/"); + }); + }); + + it("can read the remote name", function() { + assert.equal(this.remote.name(), "origin"); + }); + + it("can create and load a new remote", function() { + var repository = this.repository; + + return Remote.create(repository, "origin2", url) + .then(function() { + return Remote.lookup(repository, "origin2"); + }) + .then(function(remote) { + assert(remote.url(), url); + }); + }); + + it("can delete a remote", function() { + var repository = this.repository; + + return Remote.create(repository, "origin3", url) + .then(function() { + return Remote.delete(repository, "origin3"); + }) + .then(function() { + return Remote.lookup(repository, "origin3") + // We only want to catch the failed lookup + .then(Promise.reject.bind(Promise), Promise.resolve.bind(Promise)); + }); + }); + + it("can download from a remote", function() { + var repo = this.repository; + var remoteCallbacks; + + return repo.getRemote("origin") + .then(function(remote) { + remoteCallbacks = { + certificateCheck: function() { + return 1; + } + }; + + return remote.connect(NodeGit.Enums.DIRECTION.FETCH, remoteCallbacks) + .then(function() { + return remote.download(null); + 
}).then(function() { + return remote.disconnect(); + }); + }); + }); + + it("can monitor transfer progress while downloading", function() { + // Set a reasonable timeout here now that our repository has grown. + this.timeout(600000); + + var repo = this.repository; + var wasCalled = false; + + return Remote.create(repo, "test2", url2) + .then(function(remote) { + var fetchOpts = { + callbacks: { + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyFromAgent(userName); + }, + certificateCheck: function() { + return 1; + }, + + transferProgress: function() { + wasCalled = true; + } + } + }; + + return remote.fetch(null, fetchOpts, null); + }) + .then(function() { + assert.ok(wasCalled); + + return Remote.delete(repo, "test2"); + }); + }); + + it("can get the default branch of a remote", function() { + var remoteCallbacks = { + certificateCheck: function() { + return 1; + } + }; + + var remote = this.remote; + + return remote.connect(NodeGit.Enums.DIRECTION.FETCH, remoteCallbacks) + .then(function() { return remote.defaultBranch(); }) + .then(function(branchName) { + assert.equal("refs/heads/master", branchName); + }); + }); + + it("can fetch from a remote", function() { + return this.repository.fetch("origin", { + callbacks: { + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyFromAgent(userName); + }, + certificateCheck: function() { + return 1; + } + } + }); + }); + + it("can fetch from a private repository", function() { + var repo = this.repository; + var fetchOptions = { + callbacks: { + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyNew( + userName, + path.resolve("./test/nodegit-test-rsa.pub"), + path.resolve("./test/nodegit-test-rsa"), + "" + ); + }, + certificateCheck: function() { + return 1; + } + } + }; + + return Remote.create(repo, "private", privateUrl) + .then(function(remote) { + return remote.fetch(null, fetchOptions, "Fetch from private"); + }) + .catch(function() { + assert.fail("Unable to 
fetch from private repository"); + }); + }); + + it("can reject fetching from private repository without valid credentials", + function() { + var repo = this.repository; + var firstPass = true; + var fetchOptions = { + callbacks: { + credentials: function(url, userName) { + if (firstPass) { + firstPass = false; + return NodeGit.Cred.sshKeyFromAgent(userName); + } + }, + certificateCheck: function() { + return 1; + } + } + }; + + return Remote.create(repo, "private", privateUrl) + .then(function(remote) { + return remote.fetch(null, fetchOptions, "Fetch from private"); + }) + .then(function () { + assert.fail("Should not be able to fetch from repository"); + }) + .catch(function(error) { + assert.equal( + error.message.trim(), + "ERROR: Repository not found.", + "Should not be able to find repository." + ); + }); + }); + + it("can fetch from all remotes", function() { + var repository = this.repository; + + return Remote.create(repository, "test1", url) + .then(function() { + return Remote.create(repository, "test2", url2); + }) + .then(function() { + return repository.fetchAll({ + callbacks: { + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyFromAgent(userName); + }, + certificateCheck: function() { + return 1; + } + } + }); + }); + }); + + it("will reject if credentials promise rejects", function() { + var repo = this.repository; + var branch = "should-not-exist"; + return Remote.lookup(repo, "origin") + .then(function(remote) { + var ref = "refs/heads/" + branch; + var refs = [ref + ":" + ref]; + var options = { + callbacks: { + credentials: function(url, userName) { + var test = Promise.resolve("test") + .then(function() { return; }) + .then(function() { return; }) + .then(function() { return; }) + .then(function() { + return Promise.reject(new Error("failure case")); + }); + return test; + }, + certificateCheck: function() { + return 1; + } + } + }; + return remote.push(refs, options); + }) + .then(function() { + return Promise.reject( + new 
Error("should not be able to push to the repository")); + }, function(err) { + if (err.message === "failure case") + { + return Promise.resolve(); + } else { + throw err; + } + }) + .then(function() { + return Remote.lookup(repo, "origin"); + }) + .then(function(remote) { + var ref = "refs/heads/" + branch; + var refs = [ref + ":" + ref]; + var options = { + callbacks: { + credentials: function(url, userName) { + var test = Promise.resolve() + .then(Promise.resolve.bind(Promise)) + .then(Promise.resolve.bind(Promise)) + .then(Promise.resolve.bind(Promise)) + .then(Promise.reject.bind(Promise)); + return test; + }, + certificateCheck: function() { + return 1; + } + } + }; + return remote.push(refs, options); + }) + .then(function() { + return Promise.reject( + new Error("should not be able to push to the repository")); + }, function(err) { + if (err.message === "Method push has thrown an error.") + { + return Promise.resolve(); + } else { + throw err; + } + }); + }); + + it("cannot push to a repository with invalid credentials", function() { + var repo = this.repository; + var branch = "should-not-exist"; + return Remote.lookup(repo, "origin") + .then(function(remote) { + var ref = "refs/heads/" + branch; + var refs = [ref + ":" + ref]; + var firstPass = true; + var options = { + callbacks: { + credentials: function(url, userName) { + if (firstPass) { + firstPass = false; + if (url.indexOf("https") === -1) { + return NodeGit.Cred.sshKeyFromAgent(userName); + } else { + return NodeGit.Cred.userpassPlaintextNew(userName, ""); + } + } else { + return Promise.reject(); + } + }, + certificateCheck: function() { + return 1; + } + } + }; + return remote.push(refs, options); + }) + // takes care of windows bug, see the .catch for the proper pathway + // that this flow should take (cred cb doesn't run twice -> throws error) + .then(function() { + return Promise.reject( + new Error("should not be able to push to the repository")); + }, function(err) { + if 
(err.message.indexOf(401) === -1) { + throw err; + } else { + return Promise.resolve(); + } + }) + // catches linux / osx failure to use anonymous credentials + // stops callback infinite loop + .catch(function (reason) { + if (reason.message !== + "Method push has thrown an error.") + { + throw reason; + } else { + return Promise.resolve(); + } + }); + }); + + it("is kept alive by refspec", function() { + var repo = this.repository; + var Remote = NodeGit.Remote; + + garbageCollect(); + var startSelfFreeingCount = Remote.getSelfFreeingInstanceCount(); + var startNonSelfFreeingCount = Remote.getNonSelfFreeingConstructedCount(); + + var resolve; + var promise = new Promise(function(_resolve) { resolve = _resolve; }); + + var remote; + + repo.getRemote("origin") + .then(function(_remote) { + remote = _remote; + setTimeout(resolve, 0); + }); + + return promise + .then(function() { + // make sure we have created one self-freeing remote + assert.equal(startSelfFreeingCount + 1, + Remote.getSelfFreeingInstanceCount()); + assert.equal(startNonSelfFreeingCount, + Remote.getNonSelfFreeingConstructedCount()); + var refspec = remote.getRefspec(0); + assert.equal("refs/heads/*", refspec.src()); + remote = null; + garbageCollect(); + // the refspec should be holding on to the remote + assert.equal(startSelfFreeingCount + 1, + Remote.getSelfFreeingInstanceCount()); + + assert.equal("refs/heads/*", refspec.src()); + + refspec = null; + garbageCollect(); + // the remote should be freed now + assert.equal(startSelfFreeingCount, + Remote.getSelfFreeingInstanceCount()); + }); + }); +}); diff --git a/test/tests/repository.js b/test/tests/repository.js new file mode 100644 index 000000000..be165b303 --- /dev/null +++ b/test/tests/repository.js @@ -0,0 +1,330 @@ +var assert = require("assert"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var local = path.join.bind(path, __dirname); +var IndexUtils = 
require("../utils/index_setup"); +var RepoUtils = require("../utils/repository_setup"); + +describe("Repository", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Index = NodeGit.Index; + var Signature = NodeGit.Signature; + + var reposPath = local("../repos/workdir"); + var newRepoPath = local("../repos/newrepo"); + var emptyRepoPath = local("../repos/empty"); + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + }) + .then(function() { + return Repository.open(emptyRepoPath); + }) + .then(function(emptyRepo) { + test.emptyRepo = emptyRepo; + }); + }); + + it("cannot instantiate a repository", function() { + assert.throws( + function() { new Repository(); }, + undefined, + "hello" + ); + }); + + it("can open a valid repository", function() { + assert.ok(this.repository instanceof Repository); + }); + + it("cannot open an invalid repository", function() { + return Repository.open("repos/nonrepo") + .then(null, function(err) { + assert.ok(err instanceof Error); + }); + }); + + it("does not try to open paths that don't exist", function() { + var missingPath = "/surely/this/directory/does/not/exist/on/this/machine"; + + return Repository.open(missingPath) + .then(null, function(err) { + assert.ok(err instanceof Error); + }); + }); + + it("can initialize a repository into a folder", function() { + return Repository.init(newRepoPath, 1) + .then(function(path, isBare) { + return Repository.open(newRepoPath); + }); + }); + + it("can utilize repository init options", function() { + return fse.remove(newRepoPath) + .then(function() { + return Repository.initExt(newRepoPath, { + flags: Repository.INIT_FLAG.MKPATH + }); + }); + }); + + it("can be cleaned", function() { + this.repository.cleanup(); + + // try getting a commit after cleanup (to test that the repo is usable) + return this.repository.getHeadCommit() + .then(function(commit) { 
+ assert.equal( + commit.toString(), + "32789a79e71fbc9e04d3eff7425e1771eb595150" + ); + }); + }); + + it("can read the index", function() { + return this.repository.index() + .then(function(index) { + assert.ok(index instanceof Index); + }); + }); + + it("can list remotes", function() { + return this.repository.getRemotes() + .then(function(remotes) { + assert.equal(remotes.length, 1); + assert.equal(remotes[0], "origin"); + }); + }); + + it("can get the current branch", function() { + return this.repository.getCurrentBranch() + .then(function(branch) { + assert.equal(branch.shorthand(), "master"); + }); + }); + + it("can get the default signature", function() { + var sig = this.repository.defaultSignature(); + + assert(sig instanceof Signature); + }); + + it("gets statuses with StatusFile", function() { + var fileName = "my-new-file-that-shouldnt-exist.file"; + var fileContent = "new file from repository test"; + var repo = this.repository; + var filePath = path.join(repo.workdir(), fileName); + + return fse.writeFile(filePath, fileContent) + .then(function() { + return repo.getStatus().then(function(statuses) { + assert.equal(statuses.length, 1); + assert.equal(statuses[0].path(), fileName); + assert.ok(statuses[0].isNew()); + }); + }) + .then(function() { + return fse.remove(filePath); + }) + .catch(function (e) { + return fse.remove(filePath) + .then(function() { + return Promise.reject(e); + }); + }); + }); + + it("gets extended statuses", function() { + var fileName = "my-new-file-that-shouldnt-exist.file"; + var fileContent = "new file from repository test"; + var repo = this.repository; + var filePath = path.join(repo.workdir(), fileName); + + return fse.writeFile(filePath, fileContent) + .then(function() { + return repo.getStatusExt(); + }) + .then(function(statuses) { + assert.equal(statuses.length, 1); + assert.equal(statuses[0].path(), fileName); + assert.equal(statuses[0].indexToWorkdir().newFile().path(), fileName); + assert.ok(statuses[0].isNew()); 
+ }) + .then(function() { + return fse.remove(filePath); + }) + .catch(function (e) { + return fse.remove(filePath) + .then(function() { + return Promise.reject(e); + }); + }); + }); + + it("gets fetch-heads", function() { + var repo = this.repository; + var foundMaster; + + return repo.fetch("origin", { + credentials: function(url, userName) { + return NodeGit.Cred.sshKeyFromAgent(userName); + }, + certificateCheck: function() { + return 1; + } + }) + .then(function() { + return repo.fetchheadForeach(function(refname, remoteUrl, oid, isMerge) { + if (refname == "refs/heads/master") { + foundMaster = true; + assert.equal(refname, "refs/heads/master"); + assert.equal(remoteUrl, "https://github.com/nodegit/test"); + assert.equal( + oid.toString(), + "32789a79e71fbc9e04d3eff7425e1771eb595150"); + assert.equal(isMerge, 1); + } + }); + }) + .then(function() { + if (!foundMaster) { + throw new Error("Couldn't find master in iteration of fetch heads"); + } + }); + }); + + it("can discover if a path is part of a repository", function() { + var testPath = path.join(reposPath, "lib", "util", "normalize_oid.js"); + var expectedPath = path.join(reposPath, ".git"); + return NodeGit.Repository.discover(testPath, 0, "") + .then(function(foundPath) { + assert.equal(expectedPath, foundPath); + }); + }); + + it("can create a repo using initExt", function() { + var initFlags = NodeGit.Repository.INIT_FLAG.NO_REINIT | + NodeGit.Repository.INIT_FLAG.MKPATH | + NodeGit.Repository.INIT_FLAG.MKDIR; + return fse.remove(newRepoPath) + .then(function() { + return NodeGit.Repository.initExt(newRepoPath, { flags: initFlags }); + }) + .then(function() { + return NodeGit.Repository.open(newRepoPath); + }); + }); + + it("will throw when a repo cannot be initialized using initExt", function() { + var initFlags = NodeGit.Repository.INIT_FLAG.NO_REINIT | + NodeGit.Repository.INIT_FLAG.MKPATH | + NodeGit.Repository.INIT_FLAG.MKDIR; + + var nonsensePath = "gibberish"; + + return 
NodeGit.Repository.initExt(nonsensePath, { flags: initFlags }) + .then(function() { + assert.fail("Should have thrown an error."); + }) + .catch(function(error) { + assert(error, "Should have thrown an error."); + }); + }); + + it("can get the head commit", function() { + return this.repository.getHeadCommit() + .then(function(commit) { + assert.equal( + commit.toString(), + "32789a79e71fbc9e04d3eff7425e1771eb595150" + ); + }); + }); + + it("returns null if there is no head commit", function() { + return this.emptyRepo.getHeadCommit() + .then(function(commit) { + assert(!commit); + }); + }); + + it("can commit on head on a empty repo with createCommitOnHead", function() { + var fileName = "my-new-file-that-shouldnt-exist.file"; + var fileContent = "new file from repository test"; + var repo = this.emptyRepo; + var filePath = path.join(repo.workdir(), fileName); + var authSig = repo.defaultSignature(); + var commitSig = repo.defaultSignature(); + var commitMsg = "Doug this has been commited"; + + return fse.writeFile(filePath, fileContent) + .then(function() { + return repo.createCommitOnHead( + [fileName], + authSig, + commitSig, + commitMsg + ); + }) + .then(function(oidResult) { + return repo.getHeadCommit() + .then(function(commit) { + assert.equal( + commit.toString(), + oidResult.toString() + ); + }); + }); + }); + + it("can get all merge heads in a repo with mergeheadForeach", function() { + var repo; + var repoPath = local("../repos/merge-head"); + var ourBranchName = "ours"; + var theirBranchName = "theirs"; + var theirBranch; + var fileName = "testFile.txt"; + var numMergeHeads = 0; + var assertBranchTargetIs = function (theirBranch, mergeHead) { + assert.equal(theirBranch.target(), mergeHead.toString()); + numMergeHeads++; + }; + + return RepoUtils.createRepository(repoPath) + .then(function(_repo) { + repo = _repo; + return IndexUtils.createConflict( + repo, + ourBranchName, + theirBranchName, + fileName + ); + }) + .then(function() { + return 
repo.getBranch(theirBranchName); + }) + .then(function(_theirBranch) { + // Write the MERGE_HEAD file manually since createConflict does not + theirBranch = _theirBranch; + return fse.writeFile( + path.join(repoPath, ".git", "MERGE_HEAD"), + theirBranch.target().toString() + "\n" + ); + }) + .then(function() { + return repo.mergeheadForeach( + assertBranchTargetIs.bind(this, theirBranch) + ); + }) + .then(function() { + assert.equal(numMergeHeads, 1); + }); + }); +}); diff --git a/test/tests/reset.js b/test/tests/reset.js new file mode 100644 index 000000000..7cfea3dd3 --- /dev/null +++ b/test/tests/reset.js @@ -0,0 +1,220 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); + +describe("Reset", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Reset = NodeGit.Reset; + + var reposPath = local("../repos/workdir"); + var currentCommitOid = "32789a79e71fbc9e04d3eff7425e1771eb595150"; + var previousCommitOid = "c82fb078a192ea221c9f1093c64321c60d64aa0d"; + var filePath = "package.json"; + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repository) { + test.repo = repository; + + return test.repo.getCommit(currentCommitOid); + }) + .then(function(commit) { + test.currentCommit = commit; + + return commit.getEntry(filePath); + }) + .then(function(entry) { + return entry.getBlob(); + }) + .then(function(blob) { + test.currentCommitBlob = blob; + + return test.repo.getCommit(previousCommitOid); + }) + .then(function(commit) { + test.previousCommit = commit; + + return commit.getEntry(filePath); + }) + .then(function(entry) { + return entry.getBlob(); + }) + .then(function(blob) { + test.previousCommitBlob = blob; + }); + }); + + it("can reset a file to a previous commit", function() { + var test = this; + + return Reset.default(test.repo, 
test.previousCommit, filePath) + .then(function() { + return test.repo.refreshIndex(); + }) + .then(function(index) { + return index.writeTree(); + }) + .then(function(oid) { + return test.repo.getTree(oid); + }) + .then(function(tree) { + return tree.getEntry(filePath); + }) + .then(function(entry) { + return entry.getBlob(); + }) + .then(function(blob) { + var currentCommitContents = test.currentCommitBlob.toString(); + var previousCommitContents = test.previousCommitBlob.toString(); + var resetContents = blob.toString(); + + assert(resetContents != currentCommitContents); + assert(resetContents == previousCommitContents); + }) + .then(function() { + return Reset.default(test.repo, test.currentCommit, filePath); + }) + .then(function() { + return test.repo.refreshIndex(); + }) + .then(function(index) { + return index.writeTree(); + }) + .then(function(oid) { + return test.repo.getTree(oid); + }) + .then(function(tree) { + return tree.getEntry(filePath); + }) + .then(function(entry) { + return entry.getBlob(); + }) + .then(function(blob) { + var currentCommitContents = test.currentCommitBlob.toString(); + var previousCommitContents = test.previousCommitBlob.toString(); + var resetContents = blob.toString(); + + assert(resetContents == currentCommitContents); + assert(resetContents != previousCommitContents); + }); + }); + + it("can perform a soft reset", function() { + var test = this; + + return Reset.reset(test.repo, test.previousCommit, Reset.TYPE.SOFT) + .then(function() { + return test.repo.refreshIndex(); + }) + .then(function(index) { + return index.writeTree(); + }) + .then(function(oid) { + return test.repo.getTree(oid); + }) + .then(function(tree) { + return tree.getEntry(filePath); + }) + .then(function(entry) { + return entry.getBlob(); + }) + .then(function(blob) { + var currentCommitContents = test.currentCommitBlob.toString(); + var previousCommitContents = test.previousCommitBlob.toString(); + var resetContents = blob.toString(); + + // With a soft 
reset all of the changes should be in the index + // still so the index should still == what we had at the current + // commit and not the one we reset to + assert(resetContents == currentCommitContents); + assert(resetContents != previousCommitContents); + + return Reset(test.repo, test.currentCommit, Reset.TYPE.HARD); + }); + }); + + it("can perform a mixed reset", function() { + var test = this; + + return Reset.reset(test.repo, test.previousCommit, Reset.TYPE.MIXED) + .then(function() { + return test.repo.refreshIndex(); + }) + .then(function(index) { + return index.writeTree(); + }) + .then(function(oid) { + return test.repo.getTree(oid); + }) + .then(function(tree) { + return tree.getEntry(filePath); + }) + .then(function(entry) { + return entry.getBlob(); + }) + .then(function(blob) { + var currentCommitContents = test.currentCommitBlob.toString(); + var previousCommitContents = test.previousCommitBlob.toString(); + var resetContents = blob.toString(); + + // With a mixed reset all of the changes should removed from the index + // but still in the working directory. (i.e. 
unstaged) + assert(resetContents != currentCommitContents); + assert(resetContents == previousCommitContents); + + return fse.readFile(path.join(test.repo.workdir(), filePath)); + }) + .then(function(fileContents) { + var currentCommitContents = test.currentCommitBlob.toString(); + + assert(fileContents == currentCommitContents); + + return Reset.reset(test.repo, test.currentCommit, Reset.TYPE.HARD); + }); + }); + + it("can perform a hard reset", function() { + var test = this; + + return Reset.reset(test.repo, test.previousCommit, Reset.TYPE.HARD) + .then(function() { + return test.repo.refreshIndex(); + }) + .then(function(index) { + return index.writeTree(); + }) + .then(function(oid) { + return test.repo.getTree(oid); + }) + .then(function(tree) { + return tree.getEntry(filePath); + }) + .then(function(entry) { + return entry.getBlob(); + }) + .then(function(blob) { + var currentCommitContents = test.currentCommitBlob.toString(); + var previousCommitContents = test.previousCommitBlob.toString(); + var resetContents = blob.toString(); + + // With a hard reset all of the changes should removed from the index + // and also removed from the working directory + assert(resetContents != currentCommitContents); + assert(resetContents == previousCommitContents); + + return fse.readFile(path.join(test.repo.workdir(), filePath)); + }) + .then(function(fileContents) { + var previousCommitContents = test.previousCommitBlob.toString(); + + assert(fileContents == previousCommitContents); + + return Reset.reset(test.repo, test.currentCommit, Reset.TYPE.HARD); + }); + }); +}); diff --git a/test/tests/revert.js b/test/tests/revert.js new file mode 100644 index 000000000..b3f151831 --- /dev/null +++ b/test/tests/revert.js @@ -0,0 +1,63 @@ +var _ = require("lodash"); +var assert = require("assert"); +var RepoUtils = require("../utils/repository_setup"); +var path = require("path"); +var fs = require("fs"); +var local = path.join.bind(path, __dirname); + +describe("Revert", 
function() { + var NodeGit = require("../../"); + + var Revert = NodeGit.Revert; + var RevertOptions = NodeGit.RevertOptions; + + var test; + var fileName = "foobar.js"; + var repoPath = local("../repos/revertRepo"); + + beforeEach(function() { + test = this; + + return RepoUtils.createRepository(repoPath) + .then(function(repository) { + test.repository = repository; + + return RepoUtils.commitFileToRepo( + repository, + fileName, + "line1\nline2\nline3" + ); + }) + .then(function(firstCommit) { + test.firstCommit = firstCommit; + }); + }); + + it("revert modifies the working directoy", function() { + var fileStats = fs.statSync(path.join(repoPath, fileName)); + assert.ok(fileStats.isFile()); + + Revert.revert(test.repository, test.firstCommit, new RevertOptions()) + .then(function() { + try { + fs.statSync(path.join(repoPath, fileName)); + assert.fail("Working directory was not reverted"); + } + catch (error) { + // pass + } + }); + }); + + it("revert modifies the index", function() { + Revert.revert(test.repository, test.firstCommit, new RevertOptions()) + .then(function() { + return test.repository.index(); + }) + .then(function(index) { + var entries = index.entries; + assert.equal(1, entries.length); + assert.ok(_.endsWith(fileName, entries[0].path)); + }); + }); +}); diff --git a/test/tests/revparse.js b/test/tests/revparse.js new file mode 100644 index 000000000..a097b9a3e --- /dev/null +++ b/test/tests/revparse.js @@ -0,0 +1,66 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Revparse", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Revparse = NodeGit.Revparse; + + var reposPath = local("../repos/workdir"); + + beforeEach(function() { + var test = this; + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + return test.repository.getHeadCommit(); + }) + .then(function(commit) { + test.commit 
= commit; + }); + }); + + it("can revparse HEAD commit with single method", function() { + var test = this; + return Revparse.single(this.repository, "HEAD") + .then(function(headCommit) { + assert.ok(headCommit.isCommit()); + assert.equal(headCommit.id().toString(), test.commit.id().toString()); + }); + }); + + it("will fail on invalid spec", function() { + return Revparse.single(this.repository, "INVALID") + .then(function() { + + }) + .catch(function(error) { + assert.ok(error instanceof Error); + assert.equal(error.message, "Revspec 'INVALID' not found."); + }); + }); + + it("will fail without repo", function() { + return Revparse.single("", "INVALID") + .then(function() { + + }) + .catch(function(error) { + assert.ok(error instanceof Error); + assert.equal(error.message, "Repository repo is required."); + }); + }); + + it("will fail without spec", function() { + return Revparse.single(this.repository) + .then(function() { + + }) + .catch(function(error) { + assert.ok(error instanceof Error); + assert.equal(error.message, "String spec is required."); + }); + }); + +}); diff --git a/test/tests/revwalk.js b/test/tests/revwalk.js new file mode 100644 index 000000000..cc3942c37 --- /dev/null +++ b/test/tests/revwalk.js @@ -0,0 +1,372 @@ +var assert = require("assert"); +var RepoUtils = require("../utils/repository_setup"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +var leakTest = require("../utils/leak_test"); + +describe("Revwalk", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Revwalk = NodeGit.Revwalk; + var Oid = NodeGit.Oid; + + var reposPath = local("../repos/workdir"); + + // Set a reasonable timeout here now that our repository has grown. 
+ this.timeout(120000); + + beforeEach(function() { + var test = this; + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + return test.repository.getBranchCommit("rev-walk"); + }) + .then(function(commit) { + test.commit = commit; + }); + }); + + beforeEach(function() { + this.walker = this.repository.createRevWalk(); + this.walker.sorting(NodeGit.Revwalk.SORT.TIME); + this.walker.push(this.commit.id()); + }); + + it("can create a walker", function() { + assert.ok(this.walker instanceof Revwalk); + }); + + it("can push an object", function() { + var sha = this.commit.sha(); + + return this.walker.next() + .then(function(commit) { + assert.equal(sha, commit); + }); + }); + + it("can hide an object", function() { + var test = this; + + return next(test.walker, 4) + .then(function(commit) { + assert.equal(commit.toString(), + "b8a94aefb22d0534cc0e5acf533989c13d8725dc"); + + test.walker = test.repository.createRevWalk(); + test.walker.push(test.commit.id()); + test.walker.hide( + Oid.fromString("b8a94aefb22d0534cc0e5acf533989c13d8725dc")); + + return next(test.walker, 3); + }) + .then(function(commit) { + assert.equal(commit.toString(), + "95f695136203a372751c19b6353aeb5ae32ea40e"); + return next(test.walker, 1); + }) + .then(function(commit) { + assert.equal(commit, undefined); + }); + }); + + it("can simplify to first parent", function() { + var test = this; + + test.walker.simplifyFirstParent(); + return next(test.walker, 3) + .then(function(commit) { + assert.equal(commit.toString(), + "b8a94aefb22d0534cc0e5acf533989c13d8725dc"); + }); + }); + + it("can get a specified number of commits", function() { + var test = this; + var storedCommits; + return test.walker.getCommits(10) + .then(function(commits) { + assert.equal(commits.length, 10); + storedCommits = commits; + test.walker = test.repository.createRevWalk(); + test.walker.push(test.commit.id()); + + return test.walker.getCommits(8); + }) + 
.then(function(commits) { + assert.equal(commits.length, 8); + for (var i = 0; i < 8; i++) { + assert.equal(commits[i].toString(), storedCommits[i].toString()); + } + }); + }); + + it("can get the largest number of commits within a specified range", + function() { + var test = this; + var storedCommits; + return test.walker.getCommits(991) + .then(function(commits) { + assert.equal(commits.length, 990); + storedCommits = commits; + test.walker = test.repository.createRevWalk(); + test.walker.push(test.commit.id()); + }); + }); + + it("will return all commits from the revwalk if nothing matches", function() { + var test = this; + var magicSha = "notintherepoatallwhatsoeverisntthatcool"; + + function checkCommit(commit) { + return commit.toString() != magicSha; + } + + return test.walker.getCommitsUntil(checkCommit) + .then(function(commits) { + assert.equal(commits.length, 990); + }); + }); + + it("can get commits until you tell it not to", function() { + var test = this; + var magicSha = "b8a94aefb22d0534cc0e5acf533989c13d8725dc"; + + function checkCommit(commit) { + return commit.toString() != magicSha; + } + + return test.walker.getCommitsUntil(checkCommit) + .then(function(commits) { + assert.equal(commits.length, 4); + assert.equal(commits[commits.length-1].toString(), magicSha); + }); + }); + + it("can do a fast walk", function() { + var test = this; + var magicSha = "b8a94aefb22d0534cc0e5acf533989c13d8725dc"; + + return test.walker.fastWalk(10) + .then(function(commitOids) { + assert.equal(commitOids.length, 10); + assert.equal(commitOids[3].toString(), magicSha); + }); + }); + + it("can get the history of a file", function() { + var test = this; + var magicShas = [ + "6ed3027eda383d417457b99b38c73f88f601c368", + "95cefff6aabd3c1f6138ec289f42fec0921ff610", + "7ad92a7e4d26a1af93f3450aea8b9d9b8069ea8c", + "96f077977eb1ffcb63f9ce766cdf110e9392fdf5", + "694adc5369687c47e02642941906cfc5cb21e6c2", + "eebd0ead15d62eaf0ba276da53af43bbc3ce43ab", + 
"1273fff13b3c28cfdb13ba7f575d696d2a8902e1" + ]; + + return test.walker.fileHistoryWalk("include/functions/copy.h", 1000) + .then(function(results) { + var shas = results.map(function(result) { + return result.commit.sha(); + }); + assert.equal(magicShas.length, shas.length); + magicShas.forEach(function(sha, i) { + assert.equal(sha, shas[i]); + }); + }); + }); + + it("can get the history of a file while ignoring parallel branches", + function() { + var test = this; + var magicShas = [ + "f80e085e3118bbd6aad49dad7c53bdc37088bf9b", + "907b29d8a3b765570435c922a59cd849836a7b51" + ]; + var shas; + var walker = test.repository.createRevWalk(); + walker.sorting(NodeGit.Revwalk.SORT.TIME); + walker.push("115d114e2c4d5028c7a78428f16a4528c51be7dd"); + + return walker.fileHistoryWalk("README.md", 15) + .then(function(results) { + shas = results.map(function(result) { + return result.commit.sha(); + }); + assert.equal(magicShas.length, shas.length); + magicShas.forEach(function(sha, i) { + assert.equal(sha, shas[i]); + }); + + magicShas = [ + "4a34168b80fe706f52417106821c9cbfec630e47", + "f80e085e3118bbd6aad49dad7c53bdc37088bf9b", + "694b2d703a02501f288269bea7d1a5d643a83cc8", + "907b29d8a3b765570435c922a59cd849836a7b51" + ]; + + walker = test.repository.createRevWalk(); + walker.sorting(NodeGit.Revwalk.SORT.TIME); + walker.push("d46f7da82969ca6620864d79a55b951be0540bda"); + + return walker.fileHistoryWalk("README.md", 50); + }) + .then(function(results) { + shas = results.map(function(result) { + return result.commit.sha(); + }); + assert.equal(magicShas.length, shas.length); + magicShas.forEach(function(sha, i) { + assert.equal(sha, shas[i]); + }); + }); + }); + + it("can yield information about renames in a file history walk", + function() { + var treeOid; + var repo; + var fileNameA = "a.txt"; + var fileNameB = "b.txt"; + var repoPath = local("../repos/renamedFileRepo"); + var signature = NodeGit.Signature.create("Foo bar", + "foo@bar.com", 123456789, 60); + var headCommit; 
+ + return RepoUtils.createRepository(repoPath) + .then(function(r) { + repo = r; + return RepoUtils.commitFileToRepo( + repo, + fileNameA, + "line1\nline2\nline3\n" + ); + }) + .then(function() { + return fse.move( + path.join(repoPath, fileNameA), + path.join(repoPath, fileNameB) + ); + }) + .then(function() { + return repo.refreshIndex(); + }) + .then(function(index) { + return index.addByPath(fileNameB) + .then(function() { + return index.removeByPath(fileNameA); + }) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oidResult) { + treeOid = oidResult; + return NodeGit.Reference.nameToId(repo, "HEAD"); + }) + .then(function(head) { + return repo.getCommit(head); + }) + .then(function(head) { + return repo.createCommit("HEAD", signature, signature, + "renamed commit", treeOid, [head]); + }) + .then(function() { + return NodeGit.Reference.nameToId(repo, "HEAD"); + }) + .then(function(commitOid) { + headCommit = commitOid.tostrS(); + var walker = repo.createRevWalk(); + walker.sorting(NodeGit.Revwalk.SORT.TIME); + walker.push(commitOid.tostrS()); + return walker.fileHistoryWalk(fileNameB, 5); + }) + .then(function(results) { + assert.equal(results[0].status, NodeGit.Diff.DELTA.RENAMED); + assert.equal(results[0].newName, fileNameB); + assert.equal(results[0].oldName, fileNameA); + }) + .then(function() { + var walker = repo.createRevWalk(); + walker.sorting(NodeGit.Revwalk.SORT.TIME); + walker.push(headCommit); + return walker.fileHistoryWalk(fileNameA, 5); + }) + .then(function(results) { + assert.equal(results[0].status, NodeGit.Diff.DELTA.RENAMED); + assert.equal(results[0].newName, fileNameB); + assert.equal(results[0].oldName, fileNameA); + }) + .then(function() { + return fse.remove(repoPath); + }); + }); + + it("does not leak", function() { + var test = this; + + return leakTest(NodeGit.Revwalk, function() { + return Promise.resolve(NodeGit.Revwalk.create(test.repository)); + }); + 
}); + + // This test requires forcing garbage collection, so mocha needs to be run + // via node rather than npm, with a la `node --expose-gc [pathtohmoca] + // [testglob]` + var testGC = global.gc ? it : it.skip; + + testGC("doesnt segfault when accessing .author() twice", function(done) { + Repository.open(reposPath).then(function(repository) { + var walker = repository.createRevWalk(); + + repository.getMasterCommit().then(function(firstCommitOnMaster) { + walker.walk(firstCommitOnMaster.id(), function(err, commit) { + if (err && err.errno === NodeGit.Error.CODE.ITEROVER) { + return done(); + } + + for (var i = 0; i < 500; i++) { + commit.author().name(); + commit.author().email(); + + if ( i % 250 === 0) { + global.gc(); + } + } + }); + }); + }); + }); + + function next(walker, count) { + var promise = null; + for (var i = 0; i < count; i++) { + if (!promise) { + promise = walker.next(); + } + else { + promise = promise.then(getNext); + } + } + return promise.catch(function(error) { + if (error && error.errno === NodeGit.Error.CODE.ITEROVER) { + return Promise.resolve(); + } else { + throw error; + } + }); + + function getNext() { + return walker.next(); + } + } +}); diff --git a/test/tests/signature.js b/test/tests/signature.js new file mode 100644 index 000000000..9049a2720 --- /dev/null +++ b/test/tests/signature.js @@ -0,0 +1,109 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +var garbageCollect = require("../utils/garbage_collect.js"); +var exec = require("../../utils/execPromise"); + +describe("Signature", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Signature = NodeGit.Signature; + + var reposPath = local("../repos/workdir"); + + var name = "Bob Gnarley"; + var email = "gnarlee@bob.net"; + var arbitraryDate = 123456789; + var timezoneOffset = 60; + + it("can be created at an arbitrary time", function() { + var create = Signature.create; + var 
signature = create(name, email, arbitraryDate, timezoneOffset); + + assert.equal(signature.name(), name); + assert.equal(signature.email(), email); + assert.equal(signature.when().time(), arbitraryDate); + assert.equal(signature.when().offset(), 60); + }); + + it("can be created now", function() { + var signature = Signature.now(name, email); + var now = new Date(); + var when = signature.when(); + var diff = Math.abs(when.time() - now/1000); + + assert.equal(signature.name(), name); + assert.equal(signature.email(), email); + assert(diff <= 1); + + // libgit2 does its timezone offsets backwards from javascript + assert.equal(when.offset(), -now.getTimezoneOffset()); + }); + + it("can get a default signature when no user name is set", function(done) { + var savedUserName; + var savedUserEmail; + + var cleanUp = function() { + return exec("git config --global user.name \"" + savedUserName + "\"") + .then(function() { + exec("git config --global user.email \"" + savedUserEmail + "\""); + }); + }; + + return exec("git config --global user.name") + .then(function(userName) { + savedUserName = userName.trim(); + + return exec("git config --global user.email"); + }) + .then(function(userEmail) { + savedUserEmail = userEmail.trim(); + + return exec("git config --global --unset user.name"); + }) + .then(function() { + return exec("git config --global --unset user.email"); + }) + .then(function() { + return Repository.open(reposPath); + }) + .then(function(repo) { + var sig = repo.defaultSignature(); + assert.equal(sig.name(), "unknown"); + assert.equal(sig.email(), "unknown@unknown.com"); + }) + .then(cleanUp) + .then(done) + .catch(function(e) { + cleanUp() + .then(function() { + done(e); + return Promise.reject(e); + }); + }); + }); + + it("duplicates time", function() { + garbageCollect(); + var Time = NodeGit.Time; + var startSelfFreeingCount = Time.getSelfFreeingInstanceCount(); + var startNonSelfFreeingCount = + Time.getNonSelfFreeingConstructedCount(); + var time = 
Signature.now(name, email).when(); + + garbageCollect(); + var endSelfFreeingCount = Time.getSelfFreeingInstanceCount(); + var endNonSelfFreeingCount = Time.getNonSelfFreeingConstructedCount(); + // we should get one duplicated, self-freeing time + assert.equal(startSelfFreeingCount + 1, endSelfFreeingCount); + assert.equal(startNonSelfFreeingCount, endNonSelfFreeingCount); + + time = null; + garbageCollect(); + endSelfFreeingCount = Time.getSelfFreeingInstanceCount(); + // the self-freeing time should get freed + assert.equal(startSelfFreeingCount, endSelfFreeingCount); + }); +}); diff --git a/test/tests/stage.js b/test/tests/stage.js new file mode 100644 index 000000000..248c3e0f1 --- /dev/null +++ b/test/tests/stage.js @@ -0,0 +1,524 @@ +var assert = require("assert"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); + +var exec = require("../../utils/execPromise"); + +describe("Stage", function() { + var RepoUtils = require("../utils/repository_setup"); + var NodeGit = require("../../"); + var test; + + beforeEach(function() { + test = this; + var repoDir = "../repos/stagingRepo/"; + var repoPath = path.resolve(__dirname, repoDir); + + return RepoUtils.createRepository(repoPath) + .then(function(repo) { + test.repository = repo; + }); + }); + + after(function() { + return fse.remove(test.repository.workdir()); + }); + + function stagingTest(isUnstaged, newFileContent, discarding) { + var fileContent = newFileContent || + "One line of text\n" + + "Two lines of text\n"+ + "Three lines of text\n"+ + "Four lines of text\n"+ + "Five lines of text\n"+ + "Six lines of text\n"+ + "Seven lines of text\n"+ + "Eight lines of text\n"+ + "Nine lines of text\n"+ + "Ten lines of text\n"+ + "Eleven lines of text\n"+ + "Twelve lines of text\n"+ + "Thirteen lines of text\n"+ + "Fourteen lines of text\n"+ + "Fifteen lines of text\n"+ + "Sixteen lines of text\n"+ + "Seventeen lines of text\n"+ + "Eighteen lines of 
text\n"+ + "Nineteen lines of text\n"+ + "Twenty lines of text\n"; + var fileName = "stagedLinesTest.txt"; + var expectedContent; + var workingDirFile; + var getDiffFunction; + + if (!isUnstaged || discarding) { + expectedContent = fileContent.replace("Three", "Changed three") + .replace("Seventeen", "Changed seventeen"); + workingDirFile = expectedContent.replace("Fifteen", "Changed fifteen"); + } + else { + expectedContent = fileContent.replace("Fifteen", "Changed fifteen"); + workingDirFile = expectedContent.replace("Three", "Changed three") + .replace("Seventeen", "Changed seventeen"); + } + + if (isUnstaged) { + getDiffFunction = function() { + return test.repository.refreshIndex() + .then(function(index) { + return NodeGit.Diff.indexToWorkdir( + test.repository, + index, + { + flags: + NodeGit.Diff.OPTION.SHOW_UNTRACKED_CONTENT | + NodeGit.Diff.OPTION.RECURSE_UNTRACKED_DIRS + } + ); + }); + }; + } + else { + getDiffFunction = function() { + return RepoUtils.addFileToIndex(test.repository, fileName) + .then(function() { + return test.repository.getBranchCommit("master"); + }) + .then(function(masterCommit) { + var treePromise = masterCommit.getTree(); + var indexPromise = test.repository.refreshIndex(); + + return Promise.all([treePromise, indexPromise]); + }) + .then(function(treeAndIndex) { + var masterTree = treeAndIndex[0]; + var index = treeAndIndex[1]; + return NodeGit.Diff.treeToIndex( + test.repository, + masterTree, + index, + { + flags: + NodeGit.Diff.OPTION.SHOW_UNTRACKED_CONTENT | + NodeGit.Diff.OPTION.RECURSE_UNTRACKED_DIRS + } + ); + }); + }; + } + + return RepoUtils.commitFileToRepo(test.repository, fileName, fileContent) + .then(function() { + return fse.writeFile(path.join(test.repository.workdir(), fileName), + workingDirFile); + }) + .then(function() { + return getDiffFunction(); + }) + .then(function(fileDiff) { + return fileDiff.patches(); + }) + .then(function(patches) { + var pathPatch = patches.filter(function(patch) { + return 
patch.newFile().path() === fileName; + }); + return pathPatch[0].hunks(); + }) + .then(function(pathHunks) { + var linePromises = []; + + pathHunks.forEach(function(pathHunk) { + linePromises.push(pathHunk.lines()); + }); + + return Promise.all(linePromises); + }) + .then(function(lines) { + var linesToStage = []; + lines.forEach(function(hunkLines) { + hunkLines.forEach(function(line) { + if (line.content().toLowerCase().indexOf("fifteen") >= 0){ + linesToStage.push(line); + } + }); + }); + + if (discarding) { + return test.repository.discardLines(fileName, linesToStage); + } + + return test.repository.stageLines(fileName, linesToStage, !isUnstaged); + }) + .then(function() { + if (discarding) { + return fse.readFile( + path.join(test.repository.workdir(), fileName), "utf8" + ); + } + + return test.repository.refreshIndex() + .then(function(reloadedIndex) { + var pathOid = reloadedIndex.getByPath(fileName).id; + return test.repository.getBlob(pathOid); + }); + }) + .then(function(resultFileContents) { + assert.equal(resultFileContents.toString(), expectedContent); + }); + } + + it("can stage selected lines", function() { + return stagingTest(true); + }); + it("can unstage selected lines", function() { + return stagingTest(false); + }); + + //This is used to test cases where there are no newline at EOF + var newlineEofTestFileContent = + "One line of text\n" + + "Two lines of text\n"+ + "Three lines of text\n"+ + "Four lines of text\n"+ + "Five lines of text\n"+ + "Six lines of text\n"+ + "Seven lines of text\n"+ + "Eight lines of text\n"+ + "Nine lines of text\n"+ + "Ten lines of text\n"+ + "Eleven lines of text\n"+ + "Twelve lines of text\n"+ + "Thirteen lines of text\n"+ + "Fourteen lines of text\n"+ + "Fifteen lines of text"; + it("can stage last line with no newline at EOF", function() { + return stagingTest(true, newlineEofTestFileContent); + }); + it("can unstage last line with no newline at EOF", function() { + return stagingTest(false, 
newlineEofTestFileContent); + }); + it("can stage second to last line with no newline at EOF", function() { + var newlineEofTestFileContent2 = newlineEofTestFileContent + + "\nSixteen lines of text\nSeventeen lines of text\nEighteen lines of text"; + return stagingTest(true, newlineEofTestFileContent2); + }); + it("can unstage second to last line with no newline at EOF", function() { + var newlineEofTestFileContent2 = newlineEofTestFileContent + + "\nSixteen lines of text\nSeventeen lines of text\nEighteen lines of text"; + return stagingTest(false, newlineEofTestFileContent2); + }); + + //This is used to test case where the last hunk is staged. + var lastHunkStagedFileContent = + "Thirteen lines of text\n"+ + "Fourteen lines of text\n"+ + "Fifteen lines of text\n"+ + "Sixteen lines of text\n"+ + "Shforteenteen lines of text\n"; + + it("staging last hunk stages whole file if no filemode changes", function() { + return stagingTest(true, lastHunkStagedFileContent) + .then(function() { + return test.repository.refreshIndex(); + }) + .then(function(index) { + return NodeGit.Diff.indexToWorkdir(test.repository, index, { + flags: + NodeGit.Diff.OPTION.SHOW_UNTRACKED_CONTENT | + NodeGit.Diff.OPTION.RECURSE_UNTRACKED_DIRS + }); + }) + .then(function(diff) { + assert.equal(Object.keys(diff).length, 0); //Empty diff + return diff.patches(); + }) + .then(function(patches) { + //patches will have at least one item if there is something unstaged + assert.equal(patches.length, 0); + }); + }); + + function compareFilemodes(vsWorkdir, index, fileModeDifference) { + //Takes diff of head commit vs Workdir (if vsWorkdir is set) or vs Index + //(if vsWorkdir is unset). Note: there's only one file in the filemode + //staging tests for which this helper fn was written. 
+ //index - index to use (vsWorkdir is unset) + //fileModeDifference - expected (newfilemode) - (oldfilemode) + return test.repository.getHeadCommit() + .then(function(commit) { + return commit.getTree(); + }) + .then(function(tree) { + if (vsWorkdir) { + return NodeGit.Diff.treeToWorkdir(test.repository, tree); + } else { + return NodeGit.Diff.treeToIndex(test.repository, tree, index); + } + }) + .then(function(diff) { + return diff.getDelta(0); + }) + .then(function(delta) { + if (fileModeDifference === 0) { + if (!delta) { + return true; + } else { + throw ("File change when no file change expected."); + } + } else { + assert(delta.newFile().mode() - delta.oldFile().mode() === + fileModeDifference); + } + return true; + }); + } + + function createAndCommitFiles(repo, filePaths, fileContent, afterWriteFn) { + filePaths = filePaths instanceof Array ? filePaths : [filePaths]; + var filePromises = filePaths.map(function(fileName) { + return RepoUtils.commitFileToRepo(repo, fileName, fileContent) + .then(function() { + //First, create a file, have the same file in both the repo and workdir. 
+ return fse.writeFile(path.join(repo.workdir(), fileName), fileContent); + }) + .then(function() { + return afterWriteFn(repo, fileName); + }); + }); + + return Promise.all(filePromises); + } + + if (process.platform == "linux" || process.platform == "darwin") { + it("can stage filemode changes for one file", function() { + var fileContent = "Blek"; + var fileName = "stageFilemodeTest.txt"; + var index; + + function afterWriteFn(repo, fileName) { + return fse.chmod(path.join(repo.workdir(), fileName), + 0755 /* new filemode */); + } + + return createAndCommitFiles( + test.repository, fileName, fileContent, afterWriteFn + ) + //Then, diff between head commit and workdir should have filemode change + .then(function() { + return compareFilemodes(true, null, 0111 /* expect +x */) + .then(function() { + return test.repository.stageFilemode(fileName, true); + }); + }) + //Now lets do a commit... + .then(function() { + return test.repository.refreshIndex(); + }) + .then(function(_index) { + index = _index; + return index.writeTree(); + }) + .then(function (oid) { + return test.repository.getHeadCommit() + .then(function(parent) { + var signature = NodeGit.Signature.create("Foo bar", + "foo@bar.com", 123456789, 60); + return test.repository.createCommit("HEAD", signature, signature, + "initial commit", oid, [parent]); + }); + //... alright, we did a commit. 
+ }) + // Now if we compare head commit to the workdir, + // there shouldn't be a filemode change + .then(function() { + return compareFilemodes(true, null, 0); + }); + }); + + it("can unstage filemode changes", function() { + var fileContent = "Blek"; + var fileName = "stageFilemodeTest2.txt"; + var index; + + function afterWriteFn(repo, fileName) { + return fse.chmod(path.join(repo.workdir(), fileName), + 0755 /* new filemode */); + } + + return createAndCommitFiles( + test.repository, + fileName, + fileContent, + afterWriteFn + ) + //Then, diff between head commit and workdir should have filemode change + .then(function() { + return compareFilemodes(true, null, 0111 /* expect +x */); + }) + .then(function() { + return test.repository.refreshIndex(); + }) + .then(function(repoIndex) { + //Now we stage the whole file... + index = repoIndex; + return index.addByPath(fileName); + }) + .then(function() { + return index.write(); + }) + .then(function() { + //We expect the Index to have the filemode changes now. + return compareFilemodes(false, index, 0111 /* expect +x */) + .then(function() { + //...then we attempt to unstage filemode + return test.repository.stageFilemode(fileName, false /* unstage */); + }); + }) + .then(function() { + return test.repository.refreshIndex(); + }) + //We expect the Index to have no filemode changes, since we unstaged. + .then(function(freshIndex) { + return compareFilemodes(false, freshIndex, 0 /* expect +x */); + }) + //We also expect the workdir to now have the filemode change. 
+ .then(function() { + return compareFilemodes(true, null, 0111 /* expect +x */); + }); + }); + } else if (process.platform == "win32") { + it("can stage/unstage filemode changes for one file", function() { + var fileContent = "Blek"; + var fileName = "stageFilemodeTest.txt"; + var index; + + function afterWriteFn(repo, fileName) { + //change the permission on index + return exec("git update-index --chmod=+x " + fileName, + {cwd: repo.workdir()}) + .then(function() { + //Commit the change with execute bit set + return exec("git commit -m 'test'", + {cwd: repo.workdir()}); + }) + .then(function() { + //Then, change the permission on index + return exec("git update-index --chmod=-x " + fileName, + {cwd: repo.workdir()}); + }); + } + + return createAndCommitFiles( + test.repository, fileName, fileContent, afterWriteFn + ) + .then(function() { + return test.repository.refreshIndex(); + }) + .then(function(repoIndex) { + index = repoIndex; + //Head commit vs index + //We expect the Index to have +x + return compareFilemodes(false, index, -0111 /* expect +x */); + }) + .then(function() { + //...then we attempt to unstage filemode + return test.repository.stageFilemode(fileName, false /* unstage */); + }) + .then(function() { + return test.repository.refreshIndex(); + }) + .then(function(freshIndex) { + return compareFilemodes(false, freshIndex, 0 /* expect nochange */); + }); + }); + } + + it("can stage/unstage filemode changes for multiple files", function() { + var fileContent = "Blek"; + var fileName = ["stageFilemodeTest.txt", "stageFilemodeTest2.txt"]; + var index; + + var repoWorkDir = test.repository.workdir(); + var signature = NodeGit.Signature.create("Foo bar", + "foo@bar.com", 123456789, 60); + + return Promise.all(fileName.map(function(file) { + return fse.writeFile(path.join(repoWorkDir, file), fileContent); + })) + .then(function() { + // Initial commit + return test.repository.refreshIndex(); + }) + .then(function(index) { + return fileName + 
.reduce(function(lastPromise, file) { + return lastPromise + .then(function() { + return index.addByPath(file); + }); + }, Promise.resolve()) + .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }) + .then(function(oid) { + return test.repository.createCommit("HEAD", signature, signature, + "initial commit", oid, []); + }) + .then(function(commitOid) { + return test.repository.getCommit(commitOid); + }) + .then(function() { + //change the permission on index + return exec("git update-index --chmod=+x " + fileName[0], + {cwd: test.repository.workdir()}); + }) + .then(function() { + //change the permission on index + return exec("git update-index --chmod=+x " + fileName[1], + {cwd: test.repository.workdir()}); + }) + .then(function() { + //Commit the change with execute bit set + return exec("git commit -m 'test'", + {cwd: test.repository.workdir()}); + }) + .then(function() { + //Then, change the permission on index back to -x + return exec("git update-index --chmod=-x " + fileName[0], + {cwd: test.repository.workdir()}); + }) + .then(function() { + //Then, change the permission on index back to -x + return exec("git update-index --chmod=-x " + fileName[1], + {cwd: test.repository.workdir()}); + }) + .then(function() { + return test.repository.refreshIndex(); + }) + .then(function(repoIndex) { + index = repoIndex; + //Head commit vs index + //We expect the Index to have +x + return compareFilemodes(false, index, -0111 /* expect +x */); + }) + .then(function() { + //...then we attempt to unstage filemode + return test.repository.stageFilemode(fileName, false /* unstage */); + }) + .then(function() { + return test.repository.refreshIndex(); + }) + .then(function(freshIndex) { + return compareFilemodes(false, freshIndex, 0 /* expect nochange */); + }); + }); + + it("can discard selected lines", function() { + return stagingTest(true, null, true); + }); +}); diff --git a/test/tests/stash.js b/test/tests/stash.js new 
file mode 100644 index 000000000..1e0a1d395 --- /dev/null +++ b/test/tests/stash.js @@ -0,0 +1,246 @@ +var assert = require("assert"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var local = path.join.bind(path, __dirname); + +describe("Stash", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Stash = NodeGit.Stash; + + var reposPath = local("../repos/workdir"); + + before(function() { + var test = this; + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + }); + }); + + it("gets no stashes on clean working directory", function() { + var stashes = []; + var stashCb = function(index, message, oid) { + stashes.push({index: index, message: message, oid: oid}); + }; + + return Stash.foreach(this.repository, stashCb) + .then(function() { + assert.equal(stashes.length, 0); + }); + }); + + it("can save and drop a stash", function() { + var fileName = "README.md"; + var fileContent = "Cha-cha-cha-chaaaaaangessssss"; + var repo = this.repository; + var filePath = path.join(repo.workdir(), fileName); + var oldContent; + var stashes = []; + var stashOid; + var stashMessage = "stash test"; + + return fse.readFile(filePath) + .then(function(content) { + oldContent = content; + return fse.writeFile(filePath, fileContent); + }) + .then(function() { + return Stash.save(repo, repo.defaultSignature(), stashMessage, 0); + }) + .then(function(oid) { + stashOid = oid; + var stashCb = function(index, message, oid) { + stashes.push({index: index, message: message, oid: oid}); + }; + + return Stash.foreach(repo, stashCb); + }) + .then(function() { + assert.equal(stashes.length, 1); + assert.equal(stashes[0].index, 0); + assert.equal(stashes[0].message, "On master: " + stashMessage); + assert.equal(stashes[0].oid.toString(), stashOid.toString()); + + return Stash.drop(repo, 0); + }) + .then(function () { + stashes = []; + var 
stashCb = function(index, message, oid) { + stashes.push({index: index, message: message, oid: oid}); + }; + + return Stash.foreach(repo, stashCb); + }) + .then(function() { + assert.equal(stashes.length, 0); + }) + .catch(function(e) { + return fse.writeFile(filePath, oldContent) + .then(function() { + return Promise.reject(e); + }); + }); + }); + + it("can save and pop a stash", function() { + var fileNameA = "README.md"; + var fileNameB = "install.js"; + var oldContentA; + var oldContentB; + var fileContent = "Cha-cha-cha-chaaaaaangessssss"; + var repo = this.repository; + var filePathA = path.join(repo.workdir(), fileNameA); + var filePathB = path.join(repo.workdir(), fileNameB); + var stashMessage = "stash test"; + + return fse.readFile(filePathA, "utf-8") + .then(function(content) { + oldContentA = content; + return fse.writeFile(filePathA, fileContent); + }) + .then(function() { + return fse.readFile(filePathB, "utf-8"); + }) + .then(function(content) { + oldContentB = content; + return fse.writeFile(filePathB, fileContent); + }) + .then(function() { + return Stash.save(repo, repo.defaultSignature(), stashMessage, 0); + }) + .then(function() { + return fse.readFile(filePathA, "utf-8"); + }) + .then(function(content) { + assert.equal(oldContentA, content); + return fse.readFile(filePathB, "utf-8"); + }) + .then(function(content) { + assert.equal(oldContentB, content); + return Stash.pop(repo, 0); + }) + .then(function() { + return fse.readFile(filePathA, "utf-8"); + }) + .then(function(content) { + assert.equal(fileContent, content); + return fse.readFile(filePathB, "utf-8"); + }) + .then(function(content) { + assert.equal(fileContent, content); + }); + }); + + it("can save a stash, change files, and fail to pop stash", function() { + var fileName = "README.md"; + var fileContent = "Cha-cha-cha-chaaaaaangessssss"; + var fileContent2 = "Somewhere over the repo, changes were made."; + var repo = this.repository; + var filePath = path.join(repo.workdir(), 
fileName); + var oldContent; + var stashMessage = "stash test"; + + return fse.readFile(filePath) + .then(function(content) { + oldContent = content; + return fse.writeFile(filePath, fileContent); + }) + .then(function() { + return Stash.save(repo, repo.defaultSignature(), stashMessage, 0); + }) + .then(function() { + return fse.writeFile(filePath, fileContent2); + }) + .then(function() { + return Stash.pop(repo, 0); + }) + .catch(function(reason) { + if (reason.message !== "1 conflict prevents checkout") { + throw reason; + } else { + return Promise.resolve(); + } + }); + }); + + it("can save, apply, then drop the stash", function() { + var fileName = "README.md"; + var fileContent = "Cha-cha-cha-chaaaaaangessssss"; + var repo = this.repository; + var filePath = path.join(repo.workdir(), fileName); + var oldContent; + var stashMessage = "stash test"; + + return fse.readFile(filePath) + .then(function(content) { + oldContent = content; + return fse.writeFile(filePath, fileContent); + }) + .then(function() { + return Stash.save(repo, repo.defaultSignature(), stashMessage, 0); + }) + .then(function() { + return Stash.apply(repo, 0); + }) + .then(function() { + return Stash.drop(repo, 0); + }, function() { + throw new Error("Unable to drop stash after apply."); + }) + .then(function() { + return Stash.drop(repo, 0); + }) + .catch(function(reason) { + if (reason.message !== "Reference 'refs/stash' not found") { + Promise.reject(); + } + }); + }); + + it("can save multiple stashes and pop an arbitrary stash", function() { + var fileName = "README.md"; + var fileContentA = "Hi. It's me. I'm the dog. My name is the dog."; + var fileContentB = "Everyone likes me. I'm cute."; + var fileContentC = "I think I will bark at nothing now. Ba. Ba. 
Baba Baba."; + var repo = this.repository; + var filePath = path.join(repo.workdir(), fileName); + var oldContent; + var stashMessageA = "stash test A"; + var stashMessageB = "stash test B"; + var stashMessageC = "stash test C"; + + function writeAndStash(path, content, message) { + return fse.writeFile(path, content) + .then(function() { + return Stash.save(repo, repo.defaultSignature(), message, 0); + }); + } + + return fse.readFile(filePath, "utf-8") + .then(function (content) { + oldContent = content; + return writeAndStash(filePath, fileContentA, stashMessageA); + }) + .then(function() { + return writeAndStash(filePath, fileContentB, stashMessageB); + }) + .then(function() { + return writeAndStash(filePath, fileContentC, stashMessageC); + }) + .then(function() { + return fse.readFile(filePath, "utf-8"); + }) + .then(function(content) { + assert.equal(oldContent, content); + return Stash.pop(repo, 1); + }) + .then(function() { + return fse.readFile(filePath, "utf-8"); + }) + .then(function(content) { + assert.equal(fileContentB, content); + }); + }); +}); diff --git a/test/tests/status.js b/test/tests/status.js new file mode 100644 index 000000000..d74c7bee7 --- /dev/null +++ b/test/tests/status.js @@ -0,0 +1,108 @@ +var assert = require("assert"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var local = path.join.bind(path, __dirname); +var exec = require("../../utils/execPromise"); + +describe("Status", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Status = NodeGit.Status; + + var reposPath = local("../repos/workdir"); + + before(function() { + var test = this; + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + }); + }); + + it("gets no statuses on clean working directory", function() { + var statuses = []; + var statusCallback = function(path, status) { + statuses.push({path: path, status: 
status}); + }; + + return Status.foreach(this.repository, statusCallback) + .then(function() { + assert.equal(statuses.length, 0); + }); + }); + + it("gets a status on changing file directory", function() { + var fileName = "README.md"; + var fileContent = "Cha-cha-cha-chaaaaaangessssss"; + var repo = this.repository; + var filePath = path.join(repo.workdir(), fileName); + var oldContent; + var statuses = []; + + return fse.readFile(filePath) + .then(function(content) { + oldContent = content; + return fse.writeFile(filePath, fileContent); + }) + .then(function() { + var statusCallback = function(path, status) { + statuses.push({path: path, status: status}); + }; + return Status.foreach(repo, statusCallback); + }) + .then(function() { + assert.equal(statuses.length, 1); + assert.equal(statuses[0].path, fileName); + assert.equal(statuses[0].status, 256); + }) + .then(function () { + return fse.writeFile(filePath, oldContent); + }) + .catch(function(e) { + return fse.writeFile(filePath, oldContent) + .then(function() { + return Promise.reject(e); + }); + }); + }); + + it("gets status with options", function() { + var fileName = "my-new-file-that-shouldnt-exist.file"; + var fileContent = "new file from status tests"; + var repo = this.repository; + var filePath = path.join(repo.workdir(), fileName); + return exec("git clean -xdf", {cwd: reposPath}) + .then(function() { + return fse.writeFile(filePath, fileContent); + }) + .then(function() { + var statuses = []; + var statusCallback = function(path, status) { + statuses.push({path: path, status: status}); + }; + + var opts = { + flags: Status.OPT.INCLUDE_UNTRACKED + + Status.OPT.RECURSE_UNTRACKED_DIRS + }; + + return Status.foreachExt(repo, opts, statusCallback) + .then(function() { + assert.equal(statuses.length, 1); + assert.equal(statuses[0].path, fileName); + assert.equal(statuses[0].status, 128); + }); + }) + .then(function() { + return fse.remove(filePath); + }) + .catch(function(e) { + return 
fse.remove(filePath) + .then(function() { + return Promise.reject(e); + }); + + }); + }); +}); diff --git a/test/tests/status_file.js b/test/tests/status_file.js new file mode 100644 index 000000000..039296aac --- /dev/null +++ b/test/tests/status_file.js @@ -0,0 +1,85 @@ +var assert = require("assert"); + +describe("StatusFile", function() { + var NodeGit = require("../../"); + var Status = NodeGit.Status; + var StatusFile = NodeGit.StatusFile; + + var pathName = "README.md"; + + function testStatusFile(status) { + var statusFile = new StatusFile({ + path: pathName, + status: Status.STATUS[status] + }); + var specialFunction = status.replace(/^(WT|INDEX)_/, ""); + specialFunction = "is" + + specialFunction[0] + + specialFunction.substring(1).toLowerCase(); + if (/^WT_/.test(status)) { + assert.ok(statusFile.inWorkingTree()); + assert.ok(!statusFile.inIndex()); + } + if (/^INDEX_/.test(status)) { + assert.ok(!statusFile.inWorkingTree()); + assert.ok(statusFile.inIndex()); + } + assert.equal(statusFile.path(), pathName); + assert.equal(statusFile.statusBit(), Status.STATUS[status]); + assert.equal(statusFile.status(), status); + assert.ok(statusFile[specialFunction]()); + } + + it.skip("identifies the proper statuses for CURRENT", function() { + testStatusFile("CURRENT"); + }); + + it.skip("identifies the proper statuses for WT_UNREADABLE", function() { + testStatusFile("WT_UNREADABLE"); + }); + + it("identifies the proper statuses for WT_NEW", function() { + testStatusFile("WT_NEW"); + }); + + it("identifies the proper statuses for WT_MODIFIED", function() { + testStatusFile("WT_MODIFIED"); + }); + + it("identifies the proper statuses for WT_DELETED", function() { + testStatusFile("WT_DELETED"); + }); + + it("identifies the proper statuses for WT_TYPECHANGE", function() { + testStatusFile("WT_TYPECHANGE"); + }); + + it("identifies the proper statuses for WT_RENAMED", function() { + testStatusFile("WT_RENAMED"); + }); + + it("identifies the proper statuses for 
IGNORED", function() { + testStatusFile("IGNORED"); + }); + + it("identifies the proper statuses for INDEX_NEW", function() { + testStatusFile("INDEX_NEW"); + }); + + it("identifies the proper statuses for INDEX_MODIFIED", function() { + testStatusFile("INDEX_MODIFIED"); + }); + + it("identifies the proper statuses for INDEX_DELETED", function() { + testStatusFile("INDEX_DELETED"); + }); + + it("identifies the proper statuses for INDEX_TYPECHANGE", function() { + testStatusFile("INDEX_TYPECHANGE"); + }); + + it("identifies the proper statuses for INDEX_RENAMED", function() { + testStatusFile("INDEX_RENAMED"); + }); + +}); diff --git a/test/tests/status_list.js b/test/tests/status_list.js new file mode 100644 index 000000000..be96952a2 --- /dev/null +++ b/test/tests/status_list.js @@ -0,0 +1,60 @@ +var assert = require("assert"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var local = path.join.bind(path, __dirname); +var exec = require("../../utils/execPromise"); + +describe("StatusList", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Status = NodeGit.Status; + var StatusList = NodeGit.StatusList; + + var reposPath = local("../repos/workdir"); + + before(function() { + var test = this; + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + }); + }); + + it("gets status with deltas", function() { + var fileName = "my-new-file-that-shouldnt-exist.file"; + var fileContent = "new file from status tests"; + var repo = this.repository; + var filePath = path.join(repo.workdir(), fileName); + return exec("git clean -xdf", {cwd: reposPath}) + .then(function() { + return fse.writeFile(filePath, fileContent); + }) + .then(function() { + var opts = { + flags: Status.OPT.INCLUDE_UNTRACKED + + Status.OPT.RECURSE_UNTRACKED_DIRS + }; + + return StatusList.create(repo, opts); + }) + .then(function(list) { + 
assert.equal(list.entrycount(), 1); + + for (var i = 0; i < list.entrycount(); i++) { + var entry = Status.byIndex(list, i); + assert.equal(entry.indexToWorkdir().newFile().path(), fileName); + } + }) + .then(function() { + return fse.remove(filePath); + }) + .catch(function(e) { + return fse.remove(filePath) + .then(function() { + return Promise.reject(e); + }); + + }); + }); +}); diff --git a/test/tests/submodule.js b/test/tests/submodule.js new file mode 100644 index 000000000..f42758b12 --- /dev/null +++ b/test/tests/submodule.js @@ -0,0 +1,160 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Submodule", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var RepoUtils = require("../utils/repository_setup"); + var Submodule = NodeGit.Submodule; + + var repoPath = local("../repos/submodule"); + + beforeEach(function() { + var test = this; + + return RepoUtils.createRepository(repoPath) + .then(function(repo) { + test.repository = repo; + return Repository.open(local("../repos/workdir")); + }) + .then(function(repo) { + test.workdirRepository = repo; + }); + }); + + it("can walk over the submodules", function() { + var repo = this.workdirRepository; + var submoduleName = "vendor/libgit2"; + + return repo.getSubmoduleNames() + .then(function(submodules) { + assert.equal(submodules.length, 1); + + var submodule = submodules[0]; + assert.equal(submodule, submoduleName); + return submodule; + }) + .then(function(submodule) { + return Submodule.lookup(repo, submodule); + }) + .then(function(submodule) { + assert.equal(submodule.name(), submoduleName); + }); + }); + + it("can get submodule status", function() { + var repo = this.workdirRepository; + var submoduleName = "vendor/libgit2"; + + return Submodule.status(repo, submoduleName, Submodule.IGNORE.NONE) + .then(function(status) { + assert.equal(Submodule.STATUS.IN_CONFIG, status); + }); + }); + + it("can 
get submodule location", function() { + var repo = this.workdirRepository; + var submoduleName = "vendor/libgit2"; + + return Submodule.lookup(repo, submoduleName) + .then(function(submodule) { + return submodule.location(); + }) + .then(function(status) { + assert.equal(Submodule.STATUS.IN_CONFIG, status); + }); + }); + + it("can set submodule ignore", function() { + var repo = this.workdirRepository; + var submoduleName = "vendor/libgit2"; + + return Submodule.setIgnore(repo, submoduleName, Submodule.IGNORE.ALL) + .then(function() { + return Submodule.lookup(repo, submoduleName); + }) + .then(function(submodule) { + assert.equal(Submodule.IGNORE.ALL, submodule.ignore()); + }); + }); + + it("can set submodule url", function() { + var repo = this.workdirRepository; + var submoduleName = "vendor/libgit2"; + var submoduleUrl = "https://github.com/githubtraining/hellogitworld.git"; + + return Submodule.setUrl(repo, submoduleName, submoduleUrl) + .then(function() { + return Submodule.lookup(repo, submoduleName); + }) + .then(function(submodule) { + assert.equal(submoduleUrl, submodule.url()); + }); + }); + + it("can set submodule update", function() { + var repo = this.workdirRepository; + var submoduleName = "vendor/libgit2"; + + return Submodule.setUpdate(repo, submoduleName, Submodule.UPDATE.NONE) + .then(function() { + return Submodule.lookup(repo, submoduleName); + }) + .then(function(submodule) { + assert.equal(Submodule.UPDATE.NONE, submodule.updateStrategy()); + }); + }); + + it("can setup and finalize submodule add", function() { + this.timeout(30000); + + var repo = this.repository; + var submodulePath = "nodegittest"; + var submoduleUrl = "https://github.com/nodegit/test.git"; + + var submodule; + var submoduleRepo; + + return NodeGit.Submodule.addSetup(repo, submoduleUrl, submodulePath, 0) + .then(function(_submodule) { + submodule = _submodule; + + return submodule.init(0); + }) + .then(function() { + return submodule.open(); + }) + 
.then(function(_submoduleRepo) { + submoduleRepo = _submoduleRepo; + return submoduleRepo.fetch("origin", null, null); + }) + .then(function() { + return submoduleRepo.getReference("origin/master"); + }) + .then(function(reference) { + return reference.peel(NodeGit.Object.TYPE.COMMIT); + }) + .then(function(commit) { + return submoduleRepo.createBranch("master", commit.id()); + }) + .then(function() { + return submodule.addFinalize(); + }) + .then(function() { + // check whether the submodule exists + return Submodule.lookup(repo, submodulePath); + }) + .then(function(submodule) { + assert.equal(submodule.name(), submodulePath); + // check whether .gitmodules and the submodule are in the index + return repo.refreshIndex(); + }) + .then(function(index) { + var entries = index.entries(); + assert.equal(entries.length, 2); + assert.equal(entries[0].path, ".gitmodules"); + assert.equal(entries[1].path, submodulePath); + }); + }); +}); diff --git a/test/tests/tag.js b/test/tests/tag.js new file mode 100644 index 000000000..77b411f45 --- /dev/null +++ b/test/tests/tag.js @@ -0,0 +1,147 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("Tag", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Tag = NodeGit.Tag; + var Obj = NodeGit.Object; + var Oid = NodeGit.Oid; + var Reference = NodeGit.Reference; + + var reposPath = local("../repos/workdir"); + var tagName = "annotated-tag"; + var tagFullName = "refs/tags/" + tagName; + var tagOid = "dc800017566123ff3c746b37284a24a66546667e"; + var commitPointedTo = "32789a79e71fbc9e04d3eff7425e1771eb595150"; + var tagMessage = "This is an annotated tag\n"; + + function testTag(tag, name) { + assert.equal(tag.name(), name || tagName); + assert.equal(tag.targetType(), Obj.TYPE.COMMIT); + assert.equal(tag.message(), tagMessage); + + var target = tag.target(); + + assert.ok(target.isCommit()); + 
assert.equal(target.id().toString(), commitPointedTo); + } + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repo) { + test.repository = repo; + }); + }); + + it("can get a tag from a repo via the tag name", function() { + return this.repository.getTagByName(tagName) + .then(function(tag) { + testTag(tag); + }); + }); + + it("can get a tag from a repo via the long tag name", function() { + return this.repository.getTagByName(tagFullName) + .then(function(tag) { + testTag(tag); + }); + }); + + it("can get a tag from a repo via the tag's OID as a string", function() { + return this.repository.getTag(tagOid) + .then(function(tag) { + testTag(tag); + }); + }); + + it("can get a tag from a repo via the tag's OID object", function() { + var oid = Oid.fromString(tagOid); + + return this.repository.getTag(oid) + .then(function(tag) { + testTag(tag); + }); + }); + + it("can list tags in a repo", function() { + return Tag.list(this.repository) + .then(function(tagNames) { + tagNames = tagNames.filter(function(tagNameTest) { + return tagNameTest == tagName; + }); + + assert.equal(tagNames.length, 1); + }); + }); + + it("can create a new annotated tag in a repo and delete it", function() { + var oid = Oid.fromString(commitPointedTo); + var name = "created-annotated-tag"; + var repository = this.repository; + + return repository.createTag(oid, name, tagMessage) + .then(function(tag) { + testTag(tag, name); + }) + .then(function() { + return repository.createTag(oid, name, tagMessage); + }) + .then(function() { + return Promise.reject(new Error("should not be able to create the '" + + name + "' tag twice")); + }, function() { + return Promise.resolve(); + }) + .then(function() { + return repository.deleteTagByName(name); + }) + .then(function() { + return Reference.lookup(repository, "refs/tags/" + name); + }) + .then(function() { + return Promise.reject(new Error("the tag '" + name + + "' should not exist")); + }, function() { 
+ return Promise.resolve(); + }); + }); + + it("can create a new lightweight tag in a repo and delete it", function() { + var oid = Oid.fromString(commitPointedTo); + var name = "created-lightweight-tag"; + var repository = this.repository; + + return repository.createLightweightTag(oid, name) + .then(function(reference) { + return reference.target(); + }) + .then(function(refOid) { + assert.equal(refOid.toString(), oid.toString()); + }) + .then(function() { + return repository.createLightweightTag(oid, name); + }) + .then(function() { + return Promise.reject(new Error("should not be able to create the '" + + name + "' tag twice")); + }, function() { + return Promise.resolve(); + }) + .then(function() { + return repository.deleteTagByName(name); + }) + .then(function() { + return Reference.lookup(repository, "refs/tags/" + name); + }) + .then(function() { + return Promise.reject(new Error("the tag '" + name + + "' should not exist")); + }, function() { + return Promise.resolve(); + }); + }); +}); diff --git a/test/tests/thread_safety.js b/test/tests/thread_safety.js new file mode 100644 index 000000000..257c2d51e --- /dev/null +++ b/test/tests/thread_safety.js @@ -0,0 +1,65 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +describe("ThreadSafety", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + + var reposPath = local("../repos/workdir"); + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repo) { + test.repository = repo; + return repo.refreshIndex(); + }) + .then(function(index) { + test.index = index; + }); + }); + + it("can enable and disable thread safety", function() { + var originalValue = NodeGit.getThreadSafetyStatus(); + + NodeGit.enableThreadSafety(); + assert.equal(NodeGit.THREAD_SAFETY.ENABLED, + NodeGit.getThreadSafetyStatus()); + + 
NodeGit.setThreadSafetyStatus(NodeGit.THREAD_SAFETY.ENABLED_FOR_ASYNC_ONLY); + assert.equal(NodeGit.THREAD_SAFETY.ENABLED_FOR_ASYNC_ONLY, + NodeGit.getThreadSafetyStatus()); + + NodeGit.setThreadSafetyStatus(NodeGit.THREAD_SAFETY.DISABLED); + assert.equal(NodeGit.THREAD_SAFETY.DISABLED, + NodeGit.getThreadSafetyStatus()); + + NodeGit.setThreadSafetyStatus(originalValue); + }); + + it("can lock something and cleanup mutex", function() { + var diagnostics = NodeGit.getThreadSafetyDiagnostics(); + var originalCount = diagnostics.storedMutexesCount; + // call a sync method to guarantee that it stores a mutex, + // and that it will clean up the mutex in a garbage collection cycle + this.repository.headDetached(); + + diagnostics = NodeGit.getThreadSafetyDiagnostics(); + switch(NodeGit.getThreadSafetyStatus()) { + case NodeGit.THREAD_SAFETY.ENABLED: + // this is a fairly vague test - it just tests that something + // had a mutex created for it at some point (i.e., the thread safety + // code is not completely dead) + assert.ok(diagnostics.storedMutexesCount > 0); + break; + case NodeGit.THREAD_SAFETY.ENABLED_FOR_ASYNC_ONLY: + assert.equal(originalCount, diagnostics.storedMutexesCount); + break; + + case NodeGit.THREAD_SAFETY.DISABLED: + assert.equal(0, diagnostics.storedMutexesCount); + } + }); +}); diff --git a/test/tests/tree.js b/test/tests/tree.js new file mode 100644 index 000000000..aa98d4f3d --- /dev/null +++ b/test/tests/tree.js @@ -0,0 +1,99 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); + +describe("Tree", function() { + var NodeGit = require("../../"); + var RepoUtils = require("../utils/repository_setup"); + + var repoPath = local("../repos/tree"); + var existingPath = local("../repos/workdir"); + var oid = "5716e9757886eaf38d51c86b192258c960d9cfea"; + + beforeEach(function() { + var test = this; + return 
RepoUtils.createRepository(repoPath) + .then(function(repo) { + test.repository = repo; + }).then(function() { + return NodeGit.Repository.open(existingPath); + }).then(function(repository) { + test.existingRepo = repository; + return repository.getCommit(oid); + }).then(function(commit) { + test.commit = commit; + }); + }); + + after(function() { + return fse.remove(repoPath); + }); + + it("gets an entry by name", + function(done) { + this.commit.getTree().then(function(tree) { + var entry = tree.entryByName("README.md"); + assert(entry); + }).done(done); + }); + + it("walks its entries and returns the same entries on both progress and end", + function() { + var repo = this.repository; + var file1 = "test.txt"; + var file2 = "foo/bar.txt"; + // index.addByPath doesn't like \s so normalize only for the expected paths + var expectedPaths = [file1, path.normalize(file2)]; + var progressEntries = []; + var endEntries; + + return RepoUtils.commitFileToRepo(repo, file1, "") + .then(function(commit) { + return RepoUtils.commitFileToRepo(repo, file2, "", commit); + }) + .then(function(commit) { + return commit.getTree(); + }) + .then(function(tree) { + assert(tree); + + return new Promise(function (resolve, reject) { + var walker = tree.walk(); + + walker.on("entry", function(entry) { + progressEntries.push(entry); + }); + walker.on("end", function(entries) { + endEntries = entries; + resolve(); + }); + walker.on("error", reject); + + walker.start(); + }); + }) + .then(function() { + assert(progressEntries.length); + assert(endEntries && endEntries.length); + + function getEntryPath(entry) { + return entry.path(); + } + + var progressFilePaths = progressEntries.map(getEntryPath); + var endFilePaths = endEntries.map(getEntryPath); + + assert.deepEqual( + expectedPaths, progressFilePaths, + "progress entry paths do not match expected paths" + ); + + assert.deepEqual( + expectedPaths, endFilePaths, + "end entry paths do not match expected paths" + ); + }); + }); +}); diff 
--git a/test/tests/tree_entry.js b/test/tests/tree_entry.js new file mode 100644 index 000000000..b4d0706c5 --- /dev/null +++ b/test/tests/tree_entry.js @@ -0,0 +1,169 @@ +var assert = require("assert"); +var path = require("path"); +var local = path.join.bind(path, __dirname); + +var leakTest = require("../utils/leak_test"); + +describe("TreeEntry", function() { + var NodeGit = require("../../"); + var Repository = NodeGit.Repository; + var Tree = NodeGit.Tree; + + var reposPath = local("../repos/workdir"); + var oid = "5716e9757886eaf38d51c86b192258c960d9cfea"; + + beforeEach(function() { + var test = this; + + return Repository.open(reposPath) + .then(function(repository) { + test.repository = repository; + + return repository.getCommit(oid); + }) + .then(function(commit) { + test.commit = commit; + }); + }); + + it("will fail on a missing file", function() { + return this.commit.getEntry("test/-entry.js") + .then(null, function(err) { + assert.ok(err instanceof Error); + }); + }); + + it("provides the correct sha for a file", function() { + return this.commit.getEntry("README.md") + .then(function(entry) { + assert.equal(entry.sha(), "6cb45ba5d32532bf0d1310dc31ca4f20f59964bc"); + }); + }); + + it("provides the correct length for a file", function() { + return this.commit.getEntry("README.md") + .then(function(entry) { + assert.equal(entry.name().length, 9); + }); + }); + + it("provides the filename", function() { + return this.commit.getEntry("test/raw-commit.js") + .then(function(entry) { + assert.equal(entry.name(), "raw-commit.js"); + }); + }); + + it("provides the full path", function() { + return this.commit.getEntry("test/raw-commit.js") + .then(function(entry) { + assert.equal(entry.path(), path.normalize("test/raw-commit.js")); + }); + }); + + it("provides the full path when the entry came from a tree", function(done) { + var testTree = function(tree, _dir) { + var dir = _dir || "", + testPromises = []; + tree.entries().forEach(function(entry) { + var 
currentPath = path.join(dir, entry.name()); + if (entry.isTree()) { + testPromises.push( + entry.getTree().then(function (subtree) { + return testTree(subtree, currentPath); + }) + ); + } else { + assert.equal(entry.path(), currentPath); + } + }); + + return Promise.all(testPromises); + }; + + return this.commit.getTree() + .then(testTree) + .done(function() { + done(); + }); + }); + + it("provides the blob representation of the entry", function() { + return this.commit.getEntry("test/raw-commit.js") + .then(function(entry) { + return entry.getBlob(); + }) + .then(function(blob) { + assert.equal(blob.rawsize(), 2736); + }); + }); + + it("provides the blob representation via callback", function() { + return this.commit.getEntry("test/raw-commit.js") + .then(function(entry) { + entry.getBlob(function (error, blob) { + assert.equal(blob.rawsize(), 2736); + }); + }); + }); + + it("provides the tree the entry is part of", function() { + return this.commit.getEntry("test") + .then(function(entry) { + return entry.getTree(); + }) + .then(function(tree) { + assert.ok(tree instanceof Tree); + }); + }); + + it("can determine if an entry is a file", function() { + return this.commit.getEntry("README.md") + .then(function(entry) { + assert.ok(entry.isFile()); + }); + }); + + it("can determine if an entry is not a file", function() { + return this.commit.getEntry("example") + .then(function(entry) { + assert.equal(entry.isFile(), false); + }); + }); + + it("can determine if an entry is a directory", function() { + return this.commit.getEntry("example") + .then(function(entry) { + assert.equal(entry.isDirectory(), true); + }); + }); + + it("can determine if an entry is a submodule", function() { + var repo = this.repository; + return repo.getCommit("878ef6efbc5f85c4f63aeedf41addc262a621308") + .then(function(commit) { + return commit.getEntry("vendor/libgit2") + .then(function(entry) { + assert.equal(entry.isSubmodule(), true); + }); + }); + }); + + it("can determine if an entry 
is not a submodule", function() { + return this.commit.getEntry("example") + .then(function(entry) { + assert.equal(entry.isSubmodule(), false); + }); + }); + + it("does not leak", function() { + var test = this; + + return leakTest(NodeGit.TreeEntry, function() { + return test.commit.getTree() + .then(function(tree) { + return tree.entryByPath("example"); + }); + }); + }); +}); diff --git a/test/tests/treebuilder.js b/test/tests/treebuilder.js new file mode 100644 index 000000000..5e1ce6263 --- /dev/null +++ b/test/tests/treebuilder.js @@ -0,0 +1,79 @@ +var assert = require("assert"); +var path = require("path"); +var fs = require("fs"); +var promisify = require("promisify-node"); +var readDir = promisify(fs.readdir); +var local = path.join.bind(path, __dirname); + +describe("TreeBuilder", function(){ + + var Git = require("../../"); + var reposPath = local("../repos/workdir"); + //setup test repo each test + beforeEach(function() { + var test = this; + + return Git.Repository.open(reposPath) + .then(function(repo) { + test.repo = repo; + }); + }); + //treebuilder created with no source when creating a new folder + // (each folder in git is a tree) + // or the root folder for a root commit + it("Can create a new treebuilder with no source", function(){ + + return Git.Treebuilder.create(this.repo, null); + }); + //treebuilder created with a source tree can add / read from tree + it("Can create a treebuilder from the latest commit tree", function(){ + + var test = this; + //get latest commit + return test.repo.getHeadCommit() + //get tree of commit + .then(function(commit){ return commit.getTree(); }) + //make treebuilder from tree + .then(function(tree){ return Git.Treebuilder.create(test.repo, tree); }) + //verify treebuilder can do stuff + .then(function(treeBuilder){ + //check + //count how many entries we should have + return readDir(reposPath) + //treebuilder should have all entries in the clean working dir + //(minus .git folder) + .then(function(dirEntries) 
{ + return assert.equal(dirEntries.length-1, treeBuilder.entrycount()); + }); + }); + }); + //adding a tree is adding a folder + it("Can add a new tree to an existing tree", function(){ + + var test = this; + //get latest commit + return test.repo.getHeadCommit() + //get tree of commit + .then(function(commit){ return commit.getTree(); }) + //make treebuilder from tree + .then(function(tree){ return Git.Treebuilder.create(test.repo, tree); }) + //verify treebuilder can do stuff + .then(function(rootTreeBuilder){ + //new dir builder + return Git.Treebuilder.create(test.repo, null) + .then(function(newTreeBuilder){ + //insert new dir + return rootTreeBuilder.insert( + "mynewfolder", + newTreeBuilder.write(), + Git.TreeEntry.FILEMODE.TREE + ); + }); + }) + .then(function(newTreeEntry){ + assert(newTreeEntry.isTree(), + "Created a tree (new folder) that is a tree"); + return Git.Tree.lookup(test.repo, newTreeEntry.oid()); + }); + }); +}); diff --git a/test/tree.js b/test/tree.js deleted file mode 100644 index a7c55fef0..000000000 --- a/test/tree.js +++ /dev/null @@ -1,63 +0,0 @@ -var git = require('../'), - rimraf = require('rimraf'), - fs = require('fs'), - path = require('path'); - -var sha = '5716e9757886eaf38d51c86b192258c960d9cfea'; -var fileCount = 512; // Number of blob & blob executabless - -exports.walk = function(test) { - test.expect(515); - - git.Repo.open('repos/workdir/.git', function(error, repo) { - repo.getCommit(sha, function(error, commit) { - var entryCount = 0; - commit.getTree(function(error, tree) { - tree.walk().on('entry', function(index, entry) { - test.equals(error, null, 'There should be no error'); - entryCount++; - }).on('end', function(errors, entries) { - test.equals(errors, null, 'There should be no error'); - test.equals(entryCount, fileCount, 'The manual tree entry count and the "end" tree entry count do not match'); - test.equals(entries.length, fileCount, 'The end entries count and the manual entry count do not match'); - 
test.done(); - }).start(); - }); - }); - }); -}; - -exports.insert = function(test) { - test.expect(1); - - git.Repo.open('repos/workdir/.git', function(error, repo) { - repo.getCommit(sha, function(error, commit) { - commit.getTree(function(error, tree) { - var text = "this is a file\n", - buffer = new Buffer(text); - repo.createBlobFromBuffer(buffer, function(error, blobId) { - var builder = tree.builder(); - builder.insert(path.join("lib", "baz", "bar.txt"), blobId, git.TreeEntry.FileMode.Blob); - builder.write(function(error, treeId) { - repo.getTree(treeId, function(error, tree) { - var author = git.Signature.create("Scott Chacon", "schacon@gmail.com", 123456789, 60), - committer = git.Signature.create("Scott A Chacon", "scott@github.com", 987654321, 90); - repo.createCommit(null, author, committer, "message", tree, [commit], function(error, commitId) { - repo.getCommit(commitId, function(error, commit) { - commit.getTree(function(error, tree) { - tree.getEntry('lib/baz/bar.txt', function(error, entry) { - entry.getBlob(function(error, blob) { - test.equals(blob.toString(), text); - test.done(); - }); - }); - }); - }); - }); - }); - }); - }); - }); - }); - }); -}; diff --git a/test/tree_entry.js b/test/tree_entry.js deleted file mode 100644 index eb00f9c93..000000000 --- a/test/tree_entry.js +++ /dev/null @@ -1,83 +0,0 @@ -var git = require('../'); - -var sha = '5716e9757886eaf38d51c86b192258c960d9cfea'; - -var getEntry = function(path, callback) { - git.Repo.open('repos/workdir/.git', function(error, repo) { - repo.getCommit(sha, function(error, commit) { - commit.getEntry(path, callback); - }); - }); -}; - -exports.missingFile = function(test) { - test.expect(1); - - getEntry('test/convenience-entry.js', function(error, entry) { - test.notEqual(error, null, 'Missing file should error'); - test.done(); - }); -}; - -exports.sha = function(test) { - test.expect(1); - getEntry('README.md', function(error, entry) { - var sha = entry.sha(); - test.equal(sha, 
'6cb45ba5d32532bf0d1310dc31ca4f20f59964bc', 'Entry SHA should match expected value'); - test.done(); - }); -}; - -exports.isFile = function(test) { - test.expect(2); - getEntry('README.md', function(error, entry) { - var isFile = entry.isFile(); - test.equal(isFile, true, 'Entry is a file'); - getEntry('example', function(error, entry) { - var isFile = entry.isFile(); - test.equal(isFile, false, 'Entry is a directory'); - test.done(); - }); - }); -}; - -exports.isDirectory = function(test) { - test.expect(2); - getEntry('example', function(error, entry) { - test.equal(entry.isFile(), false, 'Entry is a directory'); - getEntry('README.md', function(error, entry) { - test.equal(entry.isFile(), true, 'Entry is a file'); - test.done(); - }); - }); -}; - -exports.name = function(test) { - test.expect(2); - getEntry('test/raw-commit.js', function(error, entry) { - test.equal(error, null, 'Should not error'); - var name = entry.name(); - test.equal(name, 'raw-commit.js', 'Name should match expected value'); - test.done(); - }); -}; - -exports.getBlob = function(test) { - test.expect(1); - getEntry('test/raw-commit.js', function(error, entry) { - entry.getBlob(function(error, blob) { - test.equal(blob.size(), 2736, 'Content length should match expected value'); - test.done(); - }); - }); -}; - -exports.getTree = function(test) { - test.expect(1); - getEntry('test', function(error, entry) { - entry.getTree(function(error, tree) { - test.equal(tree instanceof git.Tree, true, 'Expected instance of Tree'); - test.done(); - }); - }); -}; diff --git a/test/utils/garbage_collect.js b/test/utils/garbage_collect.js new file mode 100644 index 000000000..a288b99c8 --- /dev/null +++ b/test/utils/garbage_collect.js @@ -0,0 +1,20 @@ +// aggressively collects garbage until we fail to improve terminatingIterations +// times. 
+function garbageCollect() { + var terminatingIterations = 3; + var usedBeforeGC = Number.MAX_VALUE; + var nondecreasingIterations = 0; + for ( ; ; ) { + global.gc(); + var usedAfterGC = process.memoryUsage().heapUsed; + if (usedAfterGC >= usedBeforeGC) { + nondecreasingIterations++; + if (nondecreasingIterations >= terminatingIterations) { + break; + } + } + usedBeforeGC = usedAfterGC; + } +} + +module.exports = garbageCollect; diff --git a/test/utils/index_setup.js b/test/utils/index_setup.js new file mode 100644 index 000000000..eafcd5b52 --- /dev/null +++ b/test/utils/index_setup.js @@ -0,0 +1,93 @@ +var NodeGit = require("../../"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); +var RepoUtils = require("../utils/repository_setup"); + +var IndexSetup = { + createConflict: function createConflict( + repository, + _ourBranchName, + _theirBranchName, + _fileName + ) { + var fileName = _fileName || "everyonesFile.txt"; + + var ourBranchName = _ourBranchName || "ours"; + var theirBranchName = _theirBranchName || "theirs"; + + var baseFileContent = "How do you feel about Toll Roads?\n"; + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!\n"; + var theirFileContent = "I'm skeptical about Toll Roads\n"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var ourCommit; + var ourBranch; + var theirBranch; + + return fse.writeFile( + path.join(repository.workdir(), fileName), + baseFileContent + ) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + return repository.createCommit("HEAD", ourSignature, + ourSignature, "initial commit", oid, []); + }) + .then(function(commitOid) { + return repository.getCommit(commitOid).then(function(commit) { + ourCommit = commit; + }).then(function() { + return repository.createBranch(ourBranchName, commitOid) + .then(function(branch) { + ourBranch = branch; + return repository.createBranch(theirBranchName, commitOid); + }); + }); + }) + .then(function(branch) { + theirBranch = branch; + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent + theirFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + return repository.createCommit(theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [ourCommit]); + }) + .then(function(commitOid) { + return fse.writeFile(path.join(repository.workdir(), fileName), + baseFileContent + ourFileContent); + }) + .then(function() { + return RepoUtils.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + return repository.createCommit(ourBranch.name(), ourSignature, + ourSignature, "we made a commit", oid, [ourCommit]); + }) + .then(function() { + return repository.checkoutBranch( + ourBranch, + new NodeGit.CheckoutOptions() + ); + }) + .then(function() { + return repository.mergeBranches(ourBranchName, theirBranchName); + }) + .catch(function(index) { + 
return NodeGit.Checkout.index(repository, index) + .then(function() { return index; }); + }); + } +}; + +module.exports = IndexSetup; diff --git a/test/utils/leak_test.js b/test/utils/leak_test.js new file mode 100644 index 000000000..a784facb1 --- /dev/null +++ b/test/utils/leak_test.js @@ -0,0 +1,33 @@ +var assert = require("assert"); + +var garbageCollect = require("./garbage_collect"); + +function leakTest(Type, getInstance) { + garbageCollect(); + var startSelfFreeingCount = Type.getSelfFreeingInstanceCount(); + var startNonSelfFreeingCount = Type.getNonSelfFreeingConstructedCount(); + + var resolve; + var promise = new Promise(function(_resolve) { resolve = _resolve; }); + + getInstance() + .then(function() { + var selfFreeingCount = Type.getSelfFreeingInstanceCount(); + assert.equal(startSelfFreeingCount + 1, selfFreeingCount); + // get out of this promise chain to help GC get rid of the commit + setTimeout(resolve, 0); + }); + + return promise + .then(function() { + garbageCollect(); + var endSelfFreeingCount = Type.getSelfFreeingInstanceCount(); + var endNonSelfFreeingCount = Type.getNonSelfFreeingConstructedCount(); + // any new self-freeing commits should have been freed + assert.equal(startSelfFreeingCount, endSelfFreeingCount); + // no new non-self-freeing commits should have been constructed + assert.equal(startNonSelfFreeingCount, endNonSelfFreeingCount); + }); +} + +module.exports = leakTest; diff --git a/test/utils/repository_setup.js b/test/utils/repository_setup.js new file mode 100644 index 000000000..c5d6ab3e2 --- /dev/null +++ b/test/utils/repository_setup.js @@ -0,0 +1,200 @@ +var assert = require("assert"); +var NodeGit = require("../../"); +var path = require("path"); +var promisify = require("promisify-node"); +var fse = promisify(require("fs-extra")); + +var RepositorySetup = { + addFileToIndex: + function addFileToIndex(repository, fileName) { + return repository.refreshIndex() + .then(function(index) { + return index.addByPath(fileName) 
+ .then(function() { + return index.write(); + }) + .then(function() { + return index.writeTree(); + }); + }); + }, + + commitFileToRepo: + function commitFileToRepo(repository, fileName, fileContent, parentCommit) { + var repoWorkDir = repository.workdir(); + var signature = NodeGit.Signature.create("Foo bar", + "foo@bar.com", 123456789, 60); + + var filePath = path.join(repoWorkDir, fileName); + var parents = []; + if (parentCommit) { + parents.push(parentCommit); + } + + // fse.ensure allows us to write files inside new folders + return fse.ensureFile(filePath) + .then(function() { + return fse.writeFile(filePath, fileContent); + }) + .then(function() { + return RepositorySetup.addFileToIndex(repository, fileName); + }) + .then(function(oid) { + return repository.createCommit("HEAD", signature, signature, + "initial commit", oid, parents); + }) + .then(function(commitOid) { + return repository.getCommit(commitOid); + }); + }, + + createRepository: + function createRepository(repoPath){ + // Create a new repository in a clean directory + return fse.remove(repoPath) + .then(function() { + return fse.ensureDir(repoPath); + }) + .then(function() { + return NodeGit.Repository.init(repoPath, 0); + }); + }, + + // Expects empty repo + setupBranches: + function setupBranches(repository, checkoutOurs) { + var repoWorkDir = repository.workdir(); + + var ourBranchName = "ours"; + var theirBranchName = "theirs"; + + var baseFileName = "baseNewFile.txt"; + var ourFileName = "ourNewFile.txt"; + var theirFileName = "theirNewFile.txt"; + + var baseFileContent = "How do you feel about Toll Roads?"; + var ourFileContent = "I like Toll Roads. 
I have an EZ-Pass!"; + var theirFileContent = "I'm skeptical about Toll Roads"; + + var ourSignature = NodeGit.Signature.create + ("Ron Paul", "RonPaul@TollRoadsRBest.info", 123456789, 60); + var theirSignature = NodeGit.Signature.create + ("Greg Abbott", "Gregggg@IllTollYourFace.us", 123456789, 60); + + var initialCommit; + var ourBranch; + var theirBranch; + + var ret = { + ourBranchName: ourBranchName, + theirBranchName: theirBranchName, + + ourSignature: ourSignature, + theirSignature: theirSignature, + + ourFileName: ourFileName, + theirFileName: theirFileName, + + ourFileContent: ourFileContent, + theirFileContent: theirFileContent + }; + + return Promise.all([ + fse.writeFile(path.join(repoWorkDir, baseFileName), + baseFileContent), + fse.writeFile(path.join(repoWorkDir, ourFileName), + ourFileContent), + fse.writeFile(path.join(repoWorkDir, theirFileName), + theirFileContent) + ]) + .then(function() { + return RepositorySetup.addFileToIndex(repository, baseFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "b5cdc109d437c4541a13fb7509116b5f03d5039a"); + + return repository.createCommit( + "HEAD", ourSignature, ourSignature, + "initial commit", oid, []); + }) + .then(function(commitOid) { + assert.equal(commitOid.toString(), + "be03abdf0353d05924c53bebeb0e5bb129cda44a"); + + return repository.getCommit(commitOid); + }) + .then(function(commit) { + ret.initialCommit = initialCommit = commit; + + return Promise.all([ + repository.createBranch(ourBranchName, initialCommit), + repository.createBranch(theirBranchName, initialCommit) + ]); + }) + .then(function(branches) { + assert(branches[0]); + assert(branches[1]); + + ret.ourBranch = ourBranch = branches[0]; + ret.theirBranch = theirBranch = branches[1]; + + return RepositorySetup.addFileToIndex(repository, ourFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "77867fc0bfeb3f80ab18a78c8d53aa3a06207047"); + + return repository.createCommit(ourBranch.name(), 
ourSignature, + ourSignature, "we made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + return repository.getCommit(commitOid); + }) + .then(function(commit) { + ret.ourCommit = commit; + return NodeGit.Reset.default( + repository, initialCommit, ourFileName); + }) + .then(function() { + return RepositorySetup.addFileToIndex( + repository, theirFileName); + }) + .then(function(oid) { + assert.equal(oid.toString(), + "be5f0fd38a39a67135ad68921c93cd5c17fefb3d"); + + return repository.createCommit( + theirBranch.name(), theirSignature, + theirSignature, "they made a commit", oid, [initialCommit]); + }) + .then(function(commitOid) { + return repository.getCommit(commitOid); + }) + .then(function(commit) { + ret.theirCommit = commit; + return NodeGit.Reset.default( + repository, initialCommit, theirFileName); + }) + .then(function() { + return Promise.all([ + fse.remove(path.join(repoWorkDir, ourFileName)), + fse.remove(path.join(repoWorkDir, theirFileName)) + ]); + }) + .then(function() { + if (checkoutOurs) { + var opts = { + checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE + }; + + return repository.checkoutBranch(ourBranchName, opts); + } + }) + .then(function() { + return ret; + }); + } +}; + +module.exports = RepositorySetup; diff --git a/utils/buildFlags.js b/utils/buildFlags.js new file mode 100644 index 000000000..3c3d9d9b2 --- /dev/null +++ b/utils/buildFlags.js @@ -0,0 +1,19 @@ +var fs = require("fs"); +var path = require("path"); + +var isGitRepo; + +try { + fs.statSync(path.join(__dirname, "..", ".git")); + isGitRepo = true; +} catch (e) { + isGitRepo = false; +} + +module.exports = { + debugBuild: !!process.env.BUILD_DEBUG, + isElectron: process.env.npm_config_runtime === "electron", + isGitRepo: isGitRepo, + isNwjs: process.env.npm_config_runtime === "node-webkit", + mustBuild: !!(isGitRepo || process.env.BUILD_DEBUG || process.env.BUILD_ONLY) +}; diff --git a/utils/execPromise.js b/utils/execPromise.js new file mode 100644 index 
000000000..c186a12bc --- /dev/null +++ b/utils/execPromise.js @@ -0,0 +1,17 @@ +var cp = require('child_process'); + +// We have to manually promisify this because at this is required in lifecycle +// methods and we are not guaranteed that any 3rd party packages are installed +// at this point +module.exports = function(command, opts) { + return new Promise(function(resolve, reject) { + return cp.exec(command, opts, function(err, result) { + if (err) { + reject(err); + } + else { + resolve(result); + } + }); + }); +}; diff --git a/utils/gitExecutableLocation.js b/utils/gitExecutableLocation.js new file mode 100644 index 000000000..48316510f --- /dev/null +++ b/utils/gitExecutableLocation.js @@ -0,0 +1,23 @@ +var cp = require("child_process"); + +module.exports = function gitExecutableLocation() { + return new Promise(function(resolve, reject) { + var cmd; + + if (process.platform === "win32") { + cmd = "where git"; + } + else { + cmd = "which git"; + } + + cp.exec(cmd, function(err, stdout, stderr) { + if (err) { + reject(err, stderr); + } + else { + resolve(stdout); + } + }); + }); +}; diff --git a/vendor/README.md b/vendor/README.md new file mode 100644 index 000000000..916269d39 --- /dev/null +++ b/vendor/README.md @@ -0,0 +1,4 @@ +# Openssl + +I do not have the time to maintain working gyp metadata for openssl, +i've taken the easy way out and stolen it from [node.js](http://github.com/joyent/node.git). 
diff --git a/vendor/http_parser/.gitignore b/vendor/http_parser/.gitignore new file mode 100644 index 000000000..32cb51b2d --- /dev/null +++ b/vendor/http_parser/.gitignore @@ -0,0 +1,28 @@ +/out/ +core +tags +*.o +test +test_g +test_fast +bench +url_parser +parsertrace +parsertrace_g +*.mk +*.Makefile +*.so.* +*.a + + +# Visual Studio uglies +*.suo +*.sln +*.vcxproj +*.vcxproj.filters +*.vcxproj.user +*.opensdf +*.ncrunchsolution* +*.sdf +*.vsp +*.psess diff --git a/vendor/http_parser/.mailmap b/vendor/http_parser/.mailmap new file mode 100644 index 000000000..278d14126 --- /dev/null +++ b/vendor/http_parser/.mailmap @@ -0,0 +1,8 @@ +# update AUTHORS with: +# git log --all --reverse --format='%aN <%aE>' | perl -ne 'BEGIN{print "# Authors ordered by first contribution.\n"} print unless $h{$_}; $h{$_} = 1' > AUTHORS +Ryan Dahl +Salman Haq +Simon Zimmermann +Thomas LE ROUX LE ROUX Thomas +Thomas LE ROUX Thomas LE ROUX +Fedor Indutny diff --git a/vendor/http_parser/.travis.yml b/vendor/http_parser/.travis.yml new file mode 100644 index 000000000..4b038e6e6 --- /dev/null +++ b/vendor/http_parser/.travis.yml @@ -0,0 +1,13 @@ +language: c + +compiler: + - clang + - gcc + +script: + - "make" + +notifications: + email: false + irc: + - "irc.freenode.net#node-ci" diff --git a/vendor/http_parser/AUTHORS b/vendor/http_parser/AUTHORS new file mode 100644 index 000000000..8e2df1d06 --- /dev/null +++ b/vendor/http_parser/AUTHORS @@ -0,0 +1,67 @@ +# Authors ordered by first contribution. 
+Ryan Dahl +Jeremy Hinegardner +Sergey Shepelev +Joe Damato +tomika +Phoenix Sol +Cliff Frey +Ewen Cheslack-Postava +Santiago Gala +Tim Becker +Jeff Terrace +Ben Noordhuis +Nathan Rajlich +Mark Nottingham +Aman Gupta +Tim Becker +Sean Cunningham +Peter Griess +Salman Haq +Cliff Frey +Jon Kolb +Fouad Mardini +Paul Querna +Felix Geisendörfer +koichik +Andre Caron +Ivo Raisr +James McLaughlin +David Gwynne +Thomas LE ROUX +Randy Rizun +Andre Louis Caron +Simon Zimmermann +Erik Dubbelboer +Martell Malone +Bertrand Paquet +BogDan Vatra +Peter Faiman +Corey Richardson +Tóth Tamás +Cam Swords +Chris Dickinson +Uli Köhler +Charlie Somerville +Patrik Stutz +Fedor Indutny +runner +Alexis Campailla +David Wragg +Vinnie Falco +Alex Butum +Rex Feng +Alex Kocharin +Mark Koopman +Helge Heß +Alexis La Goutte +George Miroshnykov +Maciej Małecki +Marc O'Morain +Jeff Pinner +Timothy J Fontaine +Akagi201 +Romain Giraud +Jay Satiro +Arne Steen +Kjell Schubert diff --git a/vendor/http_parser/LICENSE-MIT b/vendor/http_parser/LICENSE-MIT new file mode 100644 index 000000000..58010b388 --- /dev/null +++ b/vendor/http_parser/LICENSE-MIT @@ -0,0 +1,23 @@ +http_parser.c is based on src/http/ngx_http_parse.c from NGINX copyright +Igor Sysoev. + +Additional changes are licensed under the same terms as NGINX and +copyright Joyent, Inc. and other Node contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/vendor/http_parser/Makefile b/vendor/http_parser/Makefile new file mode 100644 index 000000000..373709c66 --- /dev/null +++ b/vendor/http_parser/Makefile @@ -0,0 +1,136 @@ +# Copyright Joyent, Inc. and other Node contributors. All rights reserved. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. + +PLATFORM ?= $(shell sh -c 'uname -s | tr "[A-Z]" "[a-z]"') +SONAME ?= libhttp_parser.so.2.5.0 + +CC?=gcc +AR?=ar + +CPPFLAGS ?= +LDFLAGS ?= + +CPPFLAGS += -I. 
+CPPFLAGS_DEBUG = $(CPPFLAGS) -DHTTP_PARSER_STRICT=1 +CPPFLAGS_DEBUG += $(CPPFLAGS_DEBUG_EXTRA) +CPPFLAGS_FAST = $(CPPFLAGS) -DHTTP_PARSER_STRICT=0 +CPPFLAGS_FAST += $(CPPFLAGS_FAST_EXTRA) +CPPFLAGS_BENCH = $(CPPFLAGS_FAST) + +CFLAGS += -Wall -Wextra -Werror +CFLAGS_DEBUG = $(CFLAGS) -O0 -g $(CFLAGS_DEBUG_EXTRA) +CFLAGS_FAST = $(CFLAGS) -O3 $(CFLAGS_FAST_EXTRA) +CFLAGS_BENCH = $(CFLAGS_FAST) -Wno-unused-parameter +CFLAGS_LIB = $(CFLAGS_FAST) -fPIC + +LDFLAGS_LIB = $(LDFLAGS) -shared + +INSTALL ?= install +PREFIX ?= $(DESTDIR)/usr/local +LIBDIR = $(PREFIX)/lib +INCLUDEDIR = $(PREFIX)/include + +ifneq (darwin,$(PLATFORM)) +# TODO(bnoordhuis) The native SunOS linker expects -h rather than -soname... +LDFLAGS_LIB += -Wl,-soname=$(SONAME) +endif + +test: test_g test_fast + ./test_g + ./test_fast + +test_g: http_parser_g.o test_g.o + $(CC) $(CFLAGS_DEBUG) $(LDFLAGS) http_parser_g.o test_g.o -o $@ + +test_g.o: test.c http_parser.h Makefile + $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c test.c -o $@ + +http_parser_g.o: http_parser.c http_parser.h Makefile + $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c http_parser.c -o $@ + +test_fast: http_parser.o test.o http_parser.h + $(CC) $(CFLAGS_FAST) $(LDFLAGS) http_parser.o test.o -o $@ + +test.o: test.c http_parser.h Makefile + $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c test.c -o $@ + +bench: http_parser.o bench.o + $(CC) $(CFLAGS_BENCH) $(LDFLAGS) http_parser.o bench.o -o $@ + +bench.o: bench.c http_parser.h Makefile + $(CC) $(CPPFLAGS_BENCH) $(CFLAGS_BENCH) -c bench.c -o $@ + +http_parser.o: http_parser.c http_parser.h Makefile + $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c http_parser.c + +test-run-timed: test_fast + while(true) do time ./test_fast > /dev/null; done + +test-valgrind: test_g + valgrind ./test_g + +libhttp_parser.o: http_parser.c http_parser.h Makefile + $(CC) $(CPPFLAGS_FAST) $(CFLAGS_LIB) -c http_parser.c -o libhttp_parser.o + +library: libhttp_parser.o + $(CC) $(LDFLAGS_LIB) -o $(SONAME) $< + +package: http_parser.o + 
$(AR) rcs libhttp_parser.a http_parser.o + +url_parser: http_parser.o contrib/url_parser.c + $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o $@ + +url_parser_g: http_parser_g.o contrib/url_parser.c + $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o $@ + +parsertrace: http_parser.o contrib/parsertrace.c + $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o parsertrace + +parsertrace_g: http_parser_g.o contrib/parsertrace.c + $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o parsertrace_g + +tags: http_parser.c http_parser.h test.c + ctags $^ + +install: library + $(INSTALL) -D http_parser.h $(INCLUDEDIR)/http_parser.h + $(INSTALL) -D $(SONAME) $(LIBDIR)/$(SONAME) + ln -s $(LIBDIR)/$(SONAME) $(LIBDIR)/libhttp_parser.so + +install-strip: library + $(INSTALL) -D http_parser.h $(INCLUDEDIR)/http_parser.h + $(INSTALL) -D -s $(SONAME) $(LIBDIR)/$(SONAME) + ln -s $(LIBDIR)/$(SONAME) $(LIBDIR)/libhttp_parser.so + +uninstall: + rm $(INCLUDEDIR)/http_parser.h + rm $(LIBDIR)/$(SONAME) + rm $(LIBDIR)/libhttp_parser.so + +clean: + rm -f *.o *.a tags test test_fast test_g \ + http_parser.tar libhttp_parser.so.* \ + url_parser url_parser_g parsertrace parsertrace_g + +contrib/url_parser.c: http_parser.h +contrib/parsertrace.c: http_parser.h + +.PHONY: clean package test-run test-run-timed test-valgrind install install-strip uninstall diff --git a/vendor/http_parser/README.md b/vendor/http_parser/README.md new file mode 100644 index 000000000..7c54dd42d --- /dev/null +++ b/vendor/http_parser/README.md @@ -0,0 +1,183 @@ +HTTP Parser +=========== + +[![Build Status](https://travis-ci.org/joyent/http-parser.png?branch=master)](https://travis-ci.org/joyent/http-parser) + +This is a parser for HTTP messages written in C. It parses both requests and +responses. The parser is designed to be used in performance HTTP +applications. It does not make any syscalls nor allocations, it does not +buffer data, it can be interrupted at anytime. 
Depending on your +architecture, it only requires about 40 bytes of data per message +stream (in a web server that is per connection). + +Features: + + * No dependencies + * Handles persistent streams (keep-alive). + * Decodes chunked encoding. + * Upgrade support + * Defends against buffer overflow attacks. + +The parser extracts the following information from HTTP messages: + + * Header fields and values + * Content-Length + * Request method + * Response status code + * Transfer-Encoding + * HTTP version + * Request URL + * Message body + + +Usage +----- + +One `http_parser` object is used per TCP connection. Initialize the struct +using `http_parser_init()` and set the callbacks. That might look something +like this for a request parser: +```c +http_parser_settings settings; +settings.on_url = my_url_callback; +settings.on_header_field = my_header_field_callback; +/* ... */ + +http_parser *parser = malloc(sizeof(http_parser)); +http_parser_init(parser, HTTP_REQUEST); +parser->data = my_socket; +``` + +When data is received on the socket execute the parser and check for errors. + +```c +size_t len = 80*1024, nparsed; +char buf[len]; +ssize_t recved; + +recved = recv(fd, buf, len, 0); + +if (recved < 0) { + /* Handle error. */ +} + +/* Start up / continue the parser. + * Note we pass recved==0 to signal that EOF has been received. + */ +nparsed = http_parser_execute(parser, &settings, buf, recved); + +if (parser->upgrade) { + /* handle new protocol */ +} else if (nparsed != recved) { + /* Handle error. Usually just close the connection. */ +} +``` + +HTTP needs to know where the end of the stream is. For example, sometimes +servers send responses without Content-Length and expect the client to +consume input (for the body) until EOF. To tell http_parser about EOF, give +`0` as the fourth parameter to `http_parser_execute()`. Callbacks and errors +can still be encountered during an EOF, so one must still be prepared +to receive them. 
+ +Scalar valued message information such as `status_code`, `method`, and the +HTTP version are stored in the parser structure. This data is only +temporally stored in `http_parser` and gets reset on each new message. If +this information is needed later, copy it out of the structure during the +`headers_complete` callback. + +The parser decodes the transfer-encoding for both requests and responses +transparently. That is, a chunked encoding is decoded before being sent to +the on_body callback. + + +The Special Problem of Upgrade +------------------------------ + +HTTP supports upgrading the connection to a different protocol. An +increasingly common example of this is the Web Socket protocol which sends +a request like + + GET /demo HTTP/1.1 + Upgrade: WebSocket + Connection: Upgrade + Host: example.com + Origin: http://example.com + WebSocket-Protocol: sample + +followed by non-HTTP data. + +(See http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-75 for more +information the Web Socket protocol.) + +To support this, the parser will treat this as a normal HTTP message without a +body, issuing both on_headers_complete and on_message_complete callbacks. However +http_parser_execute() will stop parsing at the end of the headers and return. + +The user is expected to check if `parser->upgrade` has been set to 1 after +`http_parser_execute()` returns. Non-HTTP data begins at the buffer supplied +offset by the return value of `http_parser_execute()`. + + +Callbacks +--------- + +During the `http_parser_execute()` call, the callbacks set in +`http_parser_settings` will be executed. The parser maintains state and +never looks behind, so buffering the data is not necessary. If you need to +save certain data for later usage, you can do that from the callbacks. + +There are two types of callbacks: + +* notification `typedef int (*http_cb) (http_parser*);` + Callbacks: on_message_begin, on_headers_complete, on_message_complete. 
+* data `typedef int (*http_data_cb) (http_parser*, const char *at, size_t length);` + Callbacks: (requests only) on_url, + (common) on_header_field, on_header_value, on_body; + +Callbacks must return 0 on success. Returning a non-zero value indicates +error to the parser, making it exit immediately. + +In case you parse HTTP message in chunks (i.e. `read()` request line +from socket, parse, read half headers, parse, etc) your data callbacks +may be called more than once. Http-parser guarantees that data pointer is only +valid for the lifetime of callback. You can also `read()` into a heap allocated +buffer to avoid copying memory around if this fits your application. + +Reading headers may be a tricky task if you read/parse headers partially. +Basically, you need to remember whether last header callback was field or value +and apply the following logic: + + (on_header_field and on_header_value shortened to on_h_*) + ------------------------ ------------ -------------------------------------------- + | State (prev. callback) | Callback | Description/action | + ------------------------ ------------ -------------------------------------------- + | nothing (first call) | on_h_field | Allocate new buffer and copy callback data | + | | | into it | + ------------------------ ------------ -------------------------------------------- + | value | on_h_field | New header started. | + | | | Copy current name,value buffers to headers | + | | | list and allocate new buffer for new name | + ------------------------ ------------ -------------------------------------------- + | field | on_h_field | Previous name continues. Reallocate name | + | | | buffer and append callback data to it | + ------------------------ ------------ -------------------------------------------- + | field | on_h_value | Value for current header started. 
Allocate | + | | | new buffer and copy callback data to it | + ------------------------ ------------ -------------------------------------------- + | value | on_h_value | Value continues. Reallocate value buffer | + | | | and append callback data to it | + ------------------------ ------------ -------------------------------------------- + + +Parsing URLs +------------ + +A simplistic zero-copy URL parser is provided as `http_parser_parse_url()`. +Users of this library may wish to use it to parse URLs constructed from +consecutive `on_url` callbacks. + +See examples of reading in headers: + +* [partial example](http://gist.github.com/155877) in C +* [from http-parser tests](http://github.com/joyent/http-parser/blob/37a0ff8/test.c#L403) in C +* [from Node library](http://github.com/joyent/node/blob/842eaf4/src/http.js#L284) in Javascript diff --git a/vendor/http_parser/bench.c b/vendor/http_parser/bench.c new file mode 100644 index 000000000..5b452fa1c --- /dev/null +++ b/vendor/http_parser/bench.c @@ -0,0 +1,111 @@ +/* Copyright Fedor Indutny. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ +#include "http_parser.h" +#include +#include +#include +#include + +static const char data[] = + "POST /joyent/http-parser HTTP/1.1\r\n" + "Host: github.com\r\n" + "DNT: 1\r\n" + "Accept-Encoding: gzip, deflate, sdch\r\n" + "Accept-Language: ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4\r\n" + "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) " + "AppleWebKit/537.36 (KHTML, like Gecko) " + "Chrome/39.0.2171.65 Safari/537.36\r\n" + "Accept: text/html,application/xhtml+xml,application/xml;q=0.9," + "image/webp,*/*;q=0.8\r\n" + "Referer: https://github.com/joyent/http-parser\r\n" + "Connection: keep-alive\r\n" + "Transfer-Encoding: chunked\r\n" + "Cache-Control: max-age=0\r\n\r\nb\r\nhello world\r\n0\r\n\r\n"; +static const size_t data_len = sizeof(data) - 1; + +static int on_info(http_parser* p) { + return 0; +} + + +static int on_data(http_parser* p, const char *at, size_t length) { + return 0; +} + +static http_parser_settings settings = { + .on_message_begin = on_info, + .on_headers_complete = on_info, + .on_message_complete = on_info, + .on_header_field = on_data, + .on_header_value = on_data, + .on_url = on_data, + .on_status = on_data, + .on_body = on_data +}; + +int bench(int iter_count, int silent) { + struct http_parser parser; + int i; + int err; + struct timeval start; + struct timeval end; + float rps; + + if (!silent) { + err = gettimeofday(&start, NULL); + assert(err == 0); + } + + for (i = 0; i < iter_count; i++) { + size_t parsed; + http_parser_init(&parser, HTTP_REQUEST); + + parsed = http_parser_execute(&parser, &settings, data, data_len); + assert(parsed == data_len); + } + + if (!silent) { + err = gettimeofday(&end, NULL); + assert(err == 0); + + fprintf(stdout, 
"Benchmark result:\n"); + + rps = (float) (end.tv_sec - start.tv_sec) + + (end.tv_usec - start.tv_usec) * 1e-6f; + fprintf(stdout, "Took %f seconds to run\n", rps); + + rps = (float) iter_count / rps; + fprintf(stdout, "%f req/sec\n", rps); + fflush(stdout); + } + + return 0; +} + +int main(int argc, char** argv) { + if (argc == 2 && strcmp(argv[1], "infinite") == 0) { + for (;;) + bench(5000000, 1); + return 0; + } else { + return bench(5000000, 0); + } +} diff --git a/vendor/http_parser/contrib/parsertrace.c b/vendor/http_parser/contrib/parsertrace.c new file mode 100644 index 000000000..e7153680f --- /dev/null +++ b/vendor/http_parser/contrib/parsertrace.c @@ -0,0 +1,160 @@ +/* Based on src/http/ngx_http_parse.c from NGINX copyright Igor Sysoev + * + * Additional changes are licensed under the same terms as NGINX and + * copyright Joyent, Inc. and other Node contributors. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ + +/* Dump what the parser finds to stdout as it happen */ + +#include "http_parser.h" +#include +#include +#include + +int on_message_begin(http_parser* _) { + (void)_; + printf("\n***MESSAGE BEGIN***\n\n"); + return 0; +} + +int on_headers_complete(http_parser* _) { + (void)_; + printf("\n***HEADERS COMPLETE***\n\n"); + return 0; +} + +int on_message_complete(http_parser* _) { + (void)_; + printf("\n***MESSAGE COMPLETE***\n\n"); + return 0; +} + +int on_url(http_parser* _, const char* at, size_t length) { + (void)_; + printf("Url: %.*s\n", (int)length, at); + return 0; +} + +int on_header_field(http_parser* _, const char* at, size_t length) { + (void)_; + printf("Header field: %.*s\n", (int)length, at); + return 0; +} + +int on_header_value(http_parser* _, const char* at, size_t length) { + (void)_; + printf("Header value: %.*s\n", (int)length, at); + return 0; +} + +int on_body(http_parser* _, const char* at, size_t length) { + (void)_; + printf("Body: %.*s\n", (int)length, at); + return 0; +} + +void usage(const char* name) { + fprintf(stderr, + "Usage: %s $type $filename\n" + " type: -x, where x is one of {r,b,q}\n" + " parses file as a Response, reQuest, or Both\n", + name); + exit(EXIT_FAILURE); +} + +int main(int argc, char* argv[]) { + enum http_parser_type file_type; + + if (argc != 3) { + usage(argv[0]); + } + + char* type = argv[1]; + if (type[0] != '-') { + usage(argv[0]); + } + + switch (type[1]) { + /* in the case of "-", type[1] will be NUL */ + case 'r': + file_type = HTTP_RESPONSE; + break; + case 'q': + file_type = HTTP_REQUEST; + break; + case 'b': + file_type = HTTP_BOTH; + break; + default: + usage(argv[0]); + } + + char* filename = argv[2]; + FILE* file = 
fopen(filename, "r"); + if (file == NULL) { + perror("fopen"); + goto fail; + } + + fseek(file, 0, SEEK_END); + long file_length = ftell(file); + if (file_length == -1) { + perror("ftell"); + goto fail; + } + fseek(file, 0, SEEK_SET); + + char* data = malloc(file_length); + if (fread(data, 1, file_length, file) != (size_t)file_length) { + fprintf(stderr, "couldn't read entire file\n"); + free(data); + goto fail; + } + + http_parser_settings settings; + memset(&settings, 0, sizeof(settings)); + settings.on_message_begin = on_message_begin; + settings.on_url = on_url; + settings.on_header_field = on_header_field; + settings.on_header_value = on_header_value; + settings.on_headers_complete = on_headers_complete; + settings.on_body = on_body; + settings.on_message_complete = on_message_complete; + + http_parser parser; + http_parser_init(&parser, file_type); + size_t nparsed = http_parser_execute(&parser, &settings, data, file_length); + free(data); + + if (nparsed != (size_t)file_length) { + fprintf(stderr, + "Error: %s (%s)\n", + http_errno_description(HTTP_PARSER_ERRNO(&parser)), + http_errno_name(HTTP_PARSER_ERRNO(&parser))); + goto fail; + } + + return EXIT_SUCCESS; + +fail: + fclose(file); + return EXIT_FAILURE; +} diff --git a/vendor/http_parser/contrib/url_parser.c b/vendor/http_parser/contrib/url_parser.c new file mode 100644 index 000000000..6650b414a --- /dev/null +++ b/vendor/http_parser/contrib/url_parser.c @@ -0,0 +1,46 @@ +#include "http_parser.h" +#include +#include + +void +dump_url (const char *url, const struct http_parser_url *u) +{ + unsigned int i; + + printf("\tfield_set: 0x%x, port: %u\n", u->field_set, u->port); + for (i = 0; i < UF_MAX; i++) { + if ((u->field_set & (1 << i)) == 0) { + printf("\tfield_data[%u]: unset\n", i); + continue; + } + + printf("\tfield_data[%u]: off: %u, len: %u, part: %.*s\n", + i, + u->field_data[i].off, + u->field_data[i].len, + u->field_data[i].len, + url + u->field_data[i].off); + } +} + +int main(int argc, char ** 
argv) { + struct http_parser_url u; + int len, connect, result; + + if (argc != 3) { + printf("Syntax : %s connect|get url\n", argv[0]); + return 1; + } + len = strlen(argv[2]); + connect = strcmp("connect", argv[1]) == 0 ? 1 : 0; + printf("Parsing %s, connect %d\n", argv[2], connect); + + result = http_parser_parse_url(argv[2], len, connect, &u); + if (result != 0) { + printf("Parse error : %d\n", result); + return result; + } + printf("Parse ok, result : \n"); + dump_url(argv[2], &u); + return 0; +} \ No newline at end of file diff --git a/vendor/http_parser/http_parser.c b/vendor/http_parser/http_parser.c new file mode 100644 index 000000000..0fa1c3627 --- /dev/null +++ b/vendor/http_parser/http_parser.c @@ -0,0 +1,2429 @@ +/* Based on src/http/ngx_http_parse.c from NGINX copyright Igor Sysoev + * + * Additional changes are licensed under the same terms as NGINX and + * copyright Joyent, Inc. and other Node contributors. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ +#include "http_parser.h" +#include +#include +#include +#include +#include +#include + +#ifndef ULLONG_MAX +# define ULLONG_MAX ((uint64_t) -1) /* 2^64-1 */ +#endif + +#ifndef MIN +# define MIN(a,b) ((a) < (b) ? (a) : (b)) +#endif + +#ifndef ARRAY_SIZE +# define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0])) +#endif + +#ifndef BIT_AT +# define BIT_AT(a, i) \ + (!!((unsigned int) (a)[(unsigned int) (i) >> 3] & \ + (1 << ((unsigned int) (i) & 7)))) +#endif + +#ifndef ELEM_AT +# define ELEM_AT(a, i, v) ((unsigned int) (i) < ARRAY_SIZE(a) ? (a)[(i)] : (v)) +#endif + +#define SET_ERRNO(e) \ +do { \ + parser->http_errno = (e); \ +} while(0) + +#define CURRENT_STATE() p_state +#define UPDATE_STATE(V) p_state = (enum state) (V); +#define RETURN(V) \ +do { \ + parser->state = CURRENT_STATE(); \ + return (V); \ +} while (0); +#define REEXECUTE() \ + goto reexecute; \ + + +#ifdef __GNUC__ +# define LIKELY(X) __builtin_expect(!!(X), 1) +# define UNLIKELY(X) __builtin_expect(!!(X), 0) +#else +# define LIKELY(X) (X) +# define UNLIKELY(X) (X) +#endif + + +/* Run the notify callback FOR, returning ER if it fails */ +#define CALLBACK_NOTIFY_(FOR, ER) \ +do { \ + assert(HTTP_PARSER_ERRNO(parser) == HPE_OK); \ + \ + if (LIKELY(settings->on_##FOR)) { \ + parser->state = CURRENT_STATE(); \ + if (UNLIKELY(0 != settings->on_##FOR(parser))) { \ + SET_ERRNO(HPE_CB_##FOR); \ + } \ + UPDATE_STATE(parser->state); \ + \ + /* We either errored above or got paused; get out */ \ + if (UNLIKELY(HTTP_PARSER_ERRNO(parser) != HPE_OK)) { \ + return (ER); \ + } \ + } \ +} while (0) + +/* Run the notify callback FOR and consume the current byte */ +#define CALLBACK_NOTIFY(FOR) CALLBACK_NOTIFY_(FOR, p - data + 1) + +/* Run the 
notify callback FOR and don't consume the current byte */ +#define CALLBACK_NOTIFY_NOADVANCE(FOR) CALLBACK_NOTIFY_(FOR, p - data) + +/* Run data callback FOR with LEN bytes, returning ER if it fails */ +#define CALLBACK_DATA_(FOR, LEN, ER) \ +do { \ + assert(HTTP_PARSER_ERRNO(parser) == HPE_OK); \ + \ + if (FOR##_mark) { \ + if (LIKELY(settings->on_##FOR)) { \ + parser->state = CURRENT_STATE(); \ + if (UNLIKELY(0 != \ + settings->on_##FOR(parser, FOR##_mark, (LEN)))) { \ + SET_ERRNO(HPE_CB_##FOR); \ + } \ + UPDATE_STATE(parser->state); \ + \ + /* We either errored above or got paused; get out */ \ + if (UNLIKELY(HTTP_PARSER_ERRNO(parser) != HPE_OK)) { \ + return (ER); \ + } \ + } \ + FOR##_mark = NULL; \ + } \ +} while (0) + +/* Run the data callback FOR and consume the current byte */ +#define CALLBACK_DATA(FOR) \ + CALLBACK_DATA_(FOR, p - FOR##_mark, p - data + 1) + +/* Run the data callback FOR and don't consume the current byte */ +#define CALLBACK_DATA_NOADVANCE(FOR) \ + CALLBACK_DATA_(FOR, p - FOR##_mark, p - data) + +/* Set the mark FOR; non-destructive if mark is already set */ +#define MARK(FOR) \ +do { \ + if (!FOR##_mark) { \ + FOR##_mark = p; \ + } \ +} while (0) + +/* Don't allow the total size of the HTTP headers (including the status + * line) to exceed HTTP_MAX_HEADER_SIZE. This check is here to protect + * embedders against denial-of-service attacks where the attacker feeds + * us a never-ending header that the embedder keeps buffering. + * + * This check is arguably the responsibility of embedders but we're doing + * it on the embedder's behalf because most won't bother and this way we + * make the web a little safer. HTTP_MAX_HEADER_SIZE is still far bigger + * than any reasonable request or response so this should never affect + * day-to-day operation. 
+ */ +#define COUNT_HEADER_SIZE(V) \ +do { \ + parser->nread += (V); \ + if (UNLIKELY(parser->nread > (HTTP_MAX_HEADER_SIZE))) { \ + SET_ERRNO(HPE_HEADER_OVERFLOW); \ + goto error; \ + } \ +} while (0) + + +#define PROXY_CONNECTION "proxy-connection" +#define CONNECTION "connection" +#define CONTENT_LENGTH "content-length" +#define TRANSFER_ENCODING "transfer-encoding" +#define UPGRADE "upgrade" +#define CHUNKED "chunked" +#define KEEP_ALIVE "keep-alive" +#define CLOSE "close" + + +static const char *method_strings[] = + { +#define XX(num, name, string) #string, + HTTP_METHOD_MAP(XX) +#undef XX + }; + + +/* Tokens as defined by rfc 2616. Also lowercases them. + * token = 1* + * separators = "(" | ")" | "<" | ">" | "@" + * | "," | ";" | ":" | "\" | <"> + * | "/" | "[" | "]" | "?" | "=" + * | "{" | "}" | SP | HT + */ +static const char tokens[256] = { +/* 0 nul 1 soh 2 stx 3 etx 4 eot 5 enq 6 ack 7 bel */ + 0, 0, 0, 0, 0, 0, 0, 0, +/* 8 bs 9 ht 10 nl 11 vt 12 np 13 cr 14 so 15 si */ + 0, 0, 0, 0, 0, 0, 0, 0, +/* 16 dle 17 dc1 18 dc2 19 dc3 20 dc4 21 nak 22 syn 23 etb */ + 0, 0, 0, 0, 0, 0, 0, 0, +/* 24 can 25 em 26 sub 27 esc 28 fs 29 gs 30 rs 31 us */ + 0, 0, 0, 0, 0, 0, 0, 0, +/* 32 sp 33 ! 34 " 35 # 36 $ 37 % 38 & 39 ' */ + 0, '!', 0, '#', '$', '%', '&', '\'', +/* 40 ( 41 ) 42 * 43 + 44 , 45 - 46 . 47 / */ + 0, 0, '*', '+', 0, '-', '.', 0, +/* 48 0 49 1 50 2 51 3 52 4 53 5 54 6 55 7 */ + '0', '1', '2', '3', '4', '5', '6', '7', +/* 56 8 57 9 58 : 59 ; 60 < 61 = 62 > 63 ? 
*/ + '8', '9', 0, 0, 0, 0, 0, 0, +/* 64 @ 65 A 66 B 67 C 68 D 69 E 70 F 71 G */ + 0, 'a', 'b', 'c', 'd', 'e', 'f', 'g', +/* 72 H 73 I 74 J 75 K 76 L 77 M 78 N 79 O */ + 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', +/* 80 P 81 Q 82 R 83 S 84 T 85 U 86 V 87 W */ + 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', +/* 88 X 89 Y 90 Z 91 [ 92 \ 93 ] 94 ^ 95 _ */ + 'x', 'y', 'z', 0, 0, 0, '^', '_', +/* 96 ` 97 a 98 b 99 c 100 d 101 e 102 f 103 g */ + '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', +/* 104 h 105 i 106 j 107 k 108 l 109 m 110 n 111 o */ + 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', +/* 112 p 113 q 114 r 115 s 116 t 117 u 118 v 119 w */ + 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', +/* 120 x 121 y 122 z 123 { 124 | 125 } 126 ~ 127 del */ + 'x', 'y', 'z', 0, '|', 0, '~', 0 }; + + +static const int8_t unhex[256] = + {-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 + ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 + ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 + , 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,-1,-1,-1,-1,-1,-1 + ,-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1 + ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 + ,-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1 + ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 + }; + + +#if HTTP_PARSER_STRICT +# define T(v) 0 +#else +# define T(v) v +#endif + + +static const uint8_t normal_url_char[32] = { +/* 0 nul 1 soh 2 stx 3 etx 4 eot 5 enq 6 ack 7 bel */ + 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, +/* 8 bs 9 ht 10 nl 11 vt 12 np 13 cr 14 so 15 si */ + 0 | T(2) | 0 | 0 | T(16) | 0 | 0 | 0, +/* 16 dle 17 dc1 18 dc2 19 dc3 20 dc4 21 nak 22 syn 23 etb */ + 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, +/* 24 can 25 em 26 sub 27 esc 28 fs 29 gs 30 rs 31 us */ + 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, +/* 32 sp 33 ! 34 " 35 # 36 $ 37 % 38 & 39 ' */ + 0 | 2 | 4 | 0 | 16 | 32 | 64 | 128, +/* 40 ( 41 ) 42 * 43 + 44 , 45 - 46 . 
47 / */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, +/* 48 0 49 1 50 2 51 3 52 4 53 5 54 6 55 7 */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, +/* 56 8 57 9 58 : 59 ; 60 < 61 = 62 > 63 ? */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 0, +/* 64 @ 65 A 66 B 67 C 68 D 69 E 70 F 71 G */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, +/* 72 H 73 I 74 J 75 K 76 L 77 M 78 N 79 O */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, +/* 80 P 81 Q 82 R 83 S 84 T 85 U 86 V 87 W */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, +/* 88 X 89 Y 90 Z 91 [ 92 \ 93 ] 94 ^ 95 _ */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, +/* 96 ` 97 a 98 b 99 c 100 d 101 e 102 f 103 g */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, +/* 104 h 105 i 106 j 107 k 108 l 109 m 110 n 111 o */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, +/* 112 p 113 q 114 r 115 s 116 t 117 u 118 v 119 w */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, +/* 120 x 121 y 122 z 123 { 124 | 125 } 126 ~ 127 del */ + 1 | 2 | 4 | 8 | 16 | 32 | 64 | 0, }; + +#undef T + +enum state + { s_dead = 1 /* important that this is > 0 */ + + , s_start_req_or_res + , s_res_or_resp_H + , s_start_res + , s_res_H + , s_res_HT + , s_res_HTT + , s_res_HTTP + , s_res_first_http_major + , s_res_http_major + , s_res_first_http_minor + , s_res_http_minor + , s_res_first_status_code + , s_res_status_code + , s_res_status_start + , s_res_status + , s_res_line_almost_done + + , s_start_req + + , s_req_method + , s_req_spaces_before_url + , s_req_schema + , s_req_schema_slash + , s_req_schema_slash_slash + , s_req_server_start + , s_req_server + , s_req_server_with_at + , s_req_path + , s_req_query_string_start + , s_req_query_string + , s_req_fragment_start + , s_req_fragment + , s_req_http_start + , s_req_http_H + , s_req_http_HT + , s_req_http_HTT + , s_req_http_HTTP + , s_req_first_http_major + , s_req_http_major + , s_req_first_http_minor + , s_req_http_minor + , s_req_line_almost_done + + , s_header_field_start + , s_header_field + , s_header_value_discard_ws + , s_header_value_discard_ws_almost_done + , 
s_header_value_discard_lws + , s_header_value_start + , s_header_value + , s_header_value_lws + + , s_header_almost_done + + , s_chunk_size_start + , s_chunk_size + , s_chunk_parameters + , s_chunk_size_almost_done + + , s_headers_almost_done + , s_headers_done + + /* Important: 's_headers_done' must be the last 'header' state. All + * states beyond this must be 'body' states. It is used for overflow + * checking. See the PARSING_HEADER() macro. + */ + + , s_chunk_data + , s_chunk_data_almost_done + , s_chunk_data_done + + , s_body_identity + , s_body_identity_eof + + , s_message_done + }; + + +#define PARSING_HEADER(state) (state <= s_headers_done) + + +enum header_states + { h_general = 0 + , h_C + , h_CO + , h_CON + + , h_matching_connection + , h_matching_proxy_connection + , h_matching_content_length + , h_matching_transfer_encoding + , h_matching_upgrade + + , h_connection + , h_content_length + , h_transfer_encoding + , h_upgrade + + , h_matching_transfer_encoding_chunked + , h_matching_connection_token_start + , h_matching_connection_keep_alive + , h_matching_connection_close + , h_matching_connection_upgrade + , h_matching_connection_token + + , h_transfer_encoding_chunked + , h_connection_keep_alive + , h_connection_close + , h_connection_upgrade + }; + +enum http_host_state + { + s_http_host_dead = 1 + , s_http_userinfo_start + , s_http_userinfo + , s_http_host_start + , s_http_host_v6_start + , s_http_host + , s_http_host_v6 + , s_http_host_v6_end + , s_http_host_port_start + , s_http_host_port +}; + +/* Macros for character classes; depends on strict-mode */ +#define CR '\r' +#define LF '\n' +#define LOWER(c) (unsigned char)(c | 0x20) +#define IS_ALPHA(c) (LOWER(c) >= 'a' && LOWER(c) <= 'z') +#define IS_NUM(c) ((c) >= '0' && (c) <= '9') +#define IS_ALPHANUM(c) (IS_ALPHA(c) || IS_NUM(c)) +#define IS_HEX(c) (IS_NUM(c) || (LOWER(c) >= 'a' && LOWER(c) <= 'f')) +#define IS_MARK(c) ((c) == '-' || (c) == '_' || (c) == '.' || \ + (c) == '!' 
|| (c) == '~' || (c) == '*' || (c) == '\'' || (c) == '(' || \ + (c) == ')') +#define IS_USERINFO_CHAR(c) (IS_ALPHANUM(c) || IS_MARK(c) || (c) == '%' || \ + (c) == ';' || (c) == ':' || (c) == '&' || (c) == '=' || (c) == '+' || \ + (c) == '$' || (c) == ',') + +#define STRICT_TOKEN(c) (tokens[(unsigned char)c]) + +#if HTTP_PARSER_STRICT +#define TOKEN(c) (tokens[(unsigned char)c]) +#define IS_URL_CHAR(c) (BIT_AT(normal_url_char, (unsigned char)c)) +#define IS_HOST_CHAR(c) (IS_ALPHANUM(c) || (c) == '.' || (c) == '-') +#else +#define TOKEN(c) ((c == ' ') ? ' ' : tokens[(unsigned char)c]) +#define IS_URL_CHAR(c) \ + (BIT_AT(normal_url_char, (unsigned char)c) || ((c) & 0x80)) +#define IS_HOST_CHAR(c) \ + (IS_ALPHANUM(c) || (c) == '.' || (c) == '-' || (c) == '_') +#endif + + +#define start_state (parser->type == HTTP_REQUEST ? s_start_req : s_start_res) + + +#if HTTP_PARSER_STRICT +# define STRICT_CHECK(cond) \ +do { \ + if (cond) { \ + SET_ERRNO(HPE_STRICT); \ + goto error; \ + } \ +} while (0) +# define NEW_MESSAGE() (http_should_keep_alive(parser) ? start_state : s_dead) +#else +# define STRICT_CHECK(cond) +# define NEW_MESSAGE() start_state +#endif + + +/* Map errno values to strings for human-readable output */ +#define HTTP_STRERROR_GEN(n, s) { "HPE_" #n, s }, +static struct { + const char *name; + const char *description; +} http_strerror_tab[] = { + HTTP_ERRNO_MAP(HTTP_STRERROR_GEN) +}; +#undef HTTP_STRERROR_GEN + +int http_message_needs_eof(const http_parser *parser); + +/* Our URL parser. + * + * This is designed to be shared by http_parser_execute() for URL validation, + * hence it has a state transition + byte-for-byte interface. In addition, it + * is meant to be embedded in http_parser_parse_url(), which does the dirty + * work of turning state transitions URL components for its API. + * + * This function should only be invoked with non-space characters. 
It is + * assumed that the caller cares about (and can detect) the transition between + * URL and non-URL states by looking for these. + */ +static enum state +parse_url_char(enum state s, const char ch) +{ + if (ch == ' ' || ch == '\r' || ch == '\n') { + return s_dead; + } + +#if HTTP_PARSER_STRICT + if (ch == '\t' || ch == '\f') { + return s_dead; + } +#endif + + switch (s) { + case s_req_spaces_before_url: + /* Proxied requests are followed by scheme of an absolute URI (alpha). + * All methods except CONNECT are followed by '/' or '*'. + */ + + if (ch == '/' || ch == '*') { + return s_req_path; + } + + if (IS_ALPHA(ch)) { + return s_req_schema; + } + + break; + + case s_req_schema: + if (IS_ALPHA(ch)) { + return s; + } + + if (ch == ':') { + return s_req_schema_slash; + } + + break; + + case s_req_schema_slash: + if (ch == '/') { + return s_req_schema_slash_slash; + } + + break; + + case s_req_schema_slash_slash: + if (ch == '/') { + return s_req_server_start; + } + + break; + + case s_req_server_with_at: + if (ch == '@') { + return s_dead; + } + + /* FALLTHROUGH */ + case s_req_server_start: + case s_req_server: + if (ch == '/') { + return s_req_path; + } + + if (ch == '?') { + return s_req_query_string_start; + } + + if (ch == '@') { + return s_req_server_with_at; + } + + if (IS_USERINFO_CHAR(ch) || ch == '[' || ch == ']') { + return s_req_server; + } + + break; + + case s_req_path: + if (IS_URL_CHAR(ch)) { + return s; + } + + switch (ch) { + case '?': + return s_req_query_string_start; + + case '#': + return s_req_fragment_start; + } + + break; + + case s_req_query_string_start: + case s_req_query_string: + if (IS_URL_CHAR(ch)) { + return s_req_query_string; + } + + switch (ch) { + case '?': + /* allow extra '?' 
in query string */ + return s_req_query_string; + + case '#': + return s_req_fragment_start; + } + + break; + + case s_req_fragment_start: + if (IS_URL_CHAR(ch)) { + return s_req_fragment; + } + + switch (ch) { + case '?': + return s_req_fragment; + + case '#': + return s; + } + + break; + + case s_req_fragment: + if (IS_URL_CHAR(ch)) { + return s; + } + + switch (ch) { + case '?': + case '#': + return s; + } + + break; + + default: + break; + } + + /* We should never fall out of the switch above unless there's an error */ + return s_dead; +} + +size_t http_parser_execute (http_parser *parser, + const http_parser_settings *settings, + const char *data, + size_t len) +{ + char c, ch; + int8_t unhex_val; + const char *p = data; + const char *header_field_mark = 0; + const char *header_value_mark = 0; + const char *url_mark = 0; + const char *body_mark = 0; + const char *status_mark = 0; + enum state p_state = (enum state) parser->state; + + /* We're in an error state. Don't bother doing anything. */ + if (HTTP_PARSER_ERRNO(parser) != HPE_OK) { + return 0; + } + + if (len == 0) { + switch (CURRENT_STATE()) { + case s_body_identity_eof: + /* Use of CALLBACK_NOTIFY() here would erroneously return 1 byte read if + * we got paused. 
+ */ + CALLBACK_NOTIFY_NOADVANCE(message_complete); + return 0; + + case s_dead: + case s_start_req_or_res: + case s_start_res: + case s_start_req: + return 0; + + default: + SET_ERRNO(HPE_INVALID_EOF_STATE); + return 1; + } + } + + + if (CURRENT_STATE() == s_header_field) + header_field_mark = data; + if (CURRENT_STATE() == s_header_value) + header_value_mark = data; + switch (CURRENT_STATE()) { + case s_req_path: + case s_req_schema: + case s_req_schema_slash: + case s_req_schema_slash_slash: + case s_req_server_start: + case s_req_server: + case s_req_server_with_at: + case s_req_query_string_start: + case s_req_query_string: + case s_req_fragment_start: + case s_req_fragment: + url_mark = data; + break; + case s_res_status: + status_mark = data; + break; + default: + break; + } + + for (p=data; p != data + len; p++) { + ch = *p; + + if (PARSING_HEADER(CURRENT_STATE())) + COUNT_HEADER_SIZE(1); + +reexecute: + switch (CURRENT_STATE()) { + + case s_dead: + /* this state is used after a 'Connection: close' message + * the parser will error out if it reads another message + */ + if (LIKELY(ch == CR || ch == LF)) + break; + + SET_ERRNO(HPE_CLOSED_CONNECTION); + goto error; + + case s_start_req_or_res: + { + if (ch == CR || ch == LF) + break; + parser->flags = 0; + parser->content_length = ULLONG_MAX; + + if (ch == 'H') { + UPDATE_STATE(s_res_or_resp_H); + + CALLBACK_NOTIFY(message_begin); + } else { + parser->type = HTTP_REQUEST; + UPDATE_STATE(s_start_req); + REEXECUTE(); + } + + break; + } + + case s_res_or_resp_H: + if (ch == 'T') { + parser->type = HTTP_RESPONSE; + UPDATE_STATE(s_res_HT); + } else { + if (UNLIKELY(ch != 'E')) { + SET_ERRNO(HPE_INVALID_CONSTANT); + goto error; + } + + parser->type = HTTP_REQUEST; + parser->method = HTTP_HEAD; + parser->index = 2; + UPDATE_STATE(s_req_method); + } + break; + + case s_start_res: + { + parser->flags = 0; + parser->content_length = ULLONG_MAX; + + switch (ch) { + case 'H': + UPDATE_STATE(s_res_H); + break; + + case 
CR: + case LF: + break; + + default: + SET_ERRNO(HPE_INVALID_CONSTANT); + goto error; + } + + CALLBACK_NOTIFY(message_begin); + break; + } + + case s_res_H: + STRICT_CHECK(ch != 'T'); + UPDATE_STATE(s_res_HT); + break; + + case s_res_HT: + STRICT_CHECK(ch != 'T'); + UPDATE_STATE(s_res_HTT); + break; + + case s_res_HTT: + STRICT_CHECK(ch != 'P'); + UPDATE_STATE(s_res_HTTP); + break; + + case s_res_HTTP: + STRICT_CHECK(ch != '/'); + UPDATE_STATE(s_res_first_http_major); + break; + + case s_res_first_http_major: + if (UNLIKELY(ch < '0' || ch > '9')) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + parser->http_major = ch - '0'; + UPDATE_STATE(s_res_http_major); + break; + + /* major HTTP version or dot */ + case s_res_http_major: + { + if (ch == '.') { + UPDATE_STATE(s_res_first_http_minor); + break; + } + + if (!IS_NUM(ch)) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + parser->http_major *= 10; + parser->http_major += ch - '0'; + + if (UNLIKELY(parser->http_major > 999)) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + break; + } + + /* first digit of minor HTTP version */ + case s_res_first_http_minor: + if (UNLIKELY(!IS_NUM(ch))) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + parser->http_minor = ch - '0'; + UPDATE_STATE(s_res_http_minor); + break; + + /* minor HTTP version or end of request line */ + case s_res_http_minor: + { + if (ch == ' ') { + UPDATE_STATE(s_res_first_status_code); + break; + } + + if (UNLIKELY(!IS_NUM(ch))) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + parser->http_minor *= 10; + parser->http_minor += ch - '0'; + + if (UNLIKELY(parser->http_minor > 999)) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + break; + } + + case s_res_first_status_code: + { + if (!IS_NUM(ch)) { + if (ch == ' ') { + break; + } + + SET_ERRNO(HPE_INVALID_STATUS); + goto error; + } + parser->status_code = ch - '0'; + UPDATE_STATE(s_res_status_code); + break; + } + + case s_res_status_code: + { + if 
(!IS_NUM(ch)) { + switch (ch) { + case ' ': + UPDATE_STATE(s_res_status_start); + break; + case CR: + UPDATE_STATE(s_res_line_almost_done); + break; + case LF: + UPDATE_STATE(s_header_field_start); + break; + default: + SET_ERRNO(HPE_INVALID_STATUS); + goto error; + } + break; + } + + parser->status_code *= 10; + parser->status_code += ch - '0'; + + if (UNLIKELY(parser->status_code > 999)) { + SET_ERRNO(HPE_INVALID_STATUS); + goto error; + } + + break; + } + + case s_res_status_start: + { + if (ch == CR) { + UPDATE_STATE(s_res_line_almost_done); + break; + } + + if (ch == LF) { + UPDATE_STATE(s_header_field_start); + break; + } + + MARK(status); + UPDATE_STATE(s_res_status); + parser->index = 0; + break; + } + + case s_res_status: + if (ch == CR) { + UPDATE_STATE(s_res_line_almost_done); + CALLBACK_DATA(status); + break; + } + + if (ch == LF) { + UPDATE_STATE(s_header_field_start); + CALLBACK_DATA(status); + break; + } + + break; + + case s_res_line_almost_done: + STRICT_CHECK(ch != LF); + UPDATE_STATE(s_header_field_start); + break; + + case s_start_req: + { + if (ch == CR || ch == LF) + break; + parser->flags = 0; + parser->content_length = ULLONG_MAX; + + if (UNLIKELY(!IS_ALPHA(ch))) { + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + + parser->method = (enum http_method) 0; + parser->index = 1; + switch (ch) { + case 'C': parser->method = HTTP_CONNECT; /* or COPY, CHECKOUT */ break; + case 'D': parser->method = HTTP_DELETE; break; + case 'G': parser->method = HTTP_GET; break; + case 'H': parser->method = HTTP_HEAD; break; + case 'L': parser->method = HTTP_LOCK; break; + case 'M': parser->method = HTTP_MKCOL; /* or MOVE, MKACTIVITY, MERGE, M-SEARCH, MKCALENDAR */ break; + case 'N': parser->method = HTTP_NOTIFY; break; + case 'O': parser->method = HTTP_OPTIONS; break; + case 'P': parser->method = HTTP_POST; + /* or PROPFIND|PROPPATCH|PUT|PATCH|PURGE */ + break; + case 'R': parser->method = HTTP_REPORT; break; + case 'S': parser->method = HTTP_SUBSCRIBE; /* or 
SEARCH */ break; + case 'T': parser->method = HTTP_TRACE; break; + case 'U': parser->method = HTTP_UNLOCK; /* or UNSUBSCRIBE */ break; + default: + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + UPDATE_STATE(s_req_method); + + CALLBACK_NOTIFY(message_begin); + + break; + } + + case s_req_method: + { + const char *matcher; + if (UNLIKELY(ch == '\0')) { + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + + matcher = method_strings[parser->method]; + if (ch == ' ' && matcher[parser->index] == '\0') { + UPDATE_STATE(s_req_spaces_before_url); + } else if (ch == matcher[parser->index]) { + ; /* nada */ + } else if (parser->method == HTTP_CONNECT) { + if (parser->index == 1 && ch == 'H') { + parser->method = HTTP_CHECKOUT; + } else if (parser->index == 2 && ch == 'P') { + parser->method = HTTP_COPY; + } else { + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + } else if (parser->method == HTTP_MKCOL) { + if (parser->index == 1 && ch == 'O') { + parser->method = HTTP_MOVE; + } else if (parser->index == 1 && ch == 'E') { + parser->method = HTTP_MERGE; + } else if (parser->index == 1 && ch == '-') { + parser->method = HTTP_MSEARCH; + } else if (parser->index == 2 && ch == 'A') { + parser->method = HTTP_MKACTIVITY; + } else if (parser->index == 3 && ch == 'A') { + parser->method = HTTP_MKCALENDAR; + } else { + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + } else if (parser->method == HTTP_SUBSCRIBE) { + if (parser->index == 1 && ch == 'E') { + parser->method = HTTP_SEARCH; + } else { + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + } else if (parser->index == 1 && parser->method == HTTP_POST) { + if (ch == 'R') { + parser->method = HTTP_PROPFIND; /* or HTTP_PROPPATCH */ + } else if (ch == 'U') { + parser->method = HTTP_PUT; /* or HTTP_PURGE */ + } else if (ch == 'A') { + parser->method = HTTP_PATCH; + } else { + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + } else if (parser->index == 2) { + if (parser->method == HTTP_PUT) { + if (ch == 'R') { + 
parser->method = HTTP_PURGE; + } else { + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + } else if (parser->method == HTTP_UNLOCK) { + if (ch == 'S') { + parser->method = HTTP_UNSUBSCRIBE; + } else { + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + } else { + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + } else if (parser->index == 4 && parser->method == HTTP_PROPFIND && ch == 'P') { + parser->method = HTTP_PROPPATCH; + } else { + SET_ERRNO(HPE_INVALID_METHOD); + goto error; + } + + ++parser->index; + break; + } + + case s_req_spaces_before_url: + { + if (ch == ' ') break; + + MARK(url); + if (parser->method == HTTP_CONNECT) { + UPDATE_STATE(s_req_server_start); + } + + UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); + if (UNLIKELY(CURRENT_STATE() == s_dead)) { + SET_ERRNO(HPE_INVALID_URL); + goto error; + } + + break; + } + + case s_req_schema: + case s_req_schema_slash: + case s_req_schema_slash_slash: + case s_req_server_start: + { + switch (ch) { + /* No whitespace allowed here */ + case ' ': + case CR: + case LF: + SET_ERRNO(HPE_INVALID_URL); + goto error; + default: + UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); + if (UNLIKELY(CURRENT_STATE() == s_dead)) { + SET_ERRNO(HPE_INVALID_URL); + goto error; + } + } + + break; + } + + case s_req_server: + case s_req_server_with_at: + case s_req_path: + case s_req_query_string_start: + case s_req_query_string: + case s_req_fragment_start: + case s_req_fragment: + { + switch (ch) { + case ' ': + UPDATE_STATE(s_req_http_start); + CALLBACK_DATA(url); + break; + case CR: + case LF: + parser->http_major = 0; + parser->http_minor = 9; + UPDATE_STATE((ch == CR) ? 
+ s_req_line_almost_done : + s_header_field_start); + CALLBACK_DATA(url); + break; + default: + UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); + if (UNLIKELY(CURRENT_STATE() == s_dead)) { + SET_ERRNO(HPE_INVALID_URL); + goto error; + } + } + break; + } + + case s_req_http_start: + switch (ch) { + case 'H': + UPDATE_STATE(s_req_http_H); + break; + case ' ': + break; + default: + SET_ERRNO(HPE_INVALID_CONSTANT); + goto error; + } + break; + + case s_req_http_H: + STRICT_CHECK(ch != 'T'); + UPDATE_STATE(s_req_http_HT); + break; + + case s_req_http_HT: + STRICT_CHECK(ch != 'T'); + UPDATE_STATE(s_req_http_HTT); + break; + + case s_req_http_HTT: + STRICT_CHECK(ch != 'P'); + UPDATE_STATE(s_req_http_HTTP); + break; + + case s_req_http_HTTP: + STRICT_CHECK(ch != '/'); + UPDATE_STATE(s_req_first_http_major); + break; + + /* first digit of major HTTP version */ + case s_req_first_http_major: + if (UNLIKELY(ch < '1' || ch > '9')) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + parser->http_major = ch - '0'; + UPDATE_STATE(s_req_http_major); + break; + + /* major HTTP version or dot */ + case s_req_http_major: + { + if (ch == '.') { + UPDATE_STATE(s_req_first_http_minor); + break; + } + + if (UNLIKELY(!IS_NUM(ch))) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + parser->http_major *= 10; + parser->http_major += ch - '0'; + + if (UNLIKELY(parser->http_major > 999)) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + break; + } + + /* first digit of minor HTTP version */ + case s_req_first_http_minor: + if (UNLIKELY(!IS_NUM(ch))) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + parser->http_minor = ch - '0'; + UPDATE_STATE(s_req_http_minor); + break; + + /* minor HTTP version or end of request line */ + case s_req_http_minor: + { + if (ch == CR) { + UPDATE_STATE(s_req_line_almost_done); + break; + } + + if (ch == LF) { + UPDATE_STATE(s_header_field_start); + break; + } + + /* XXX allow spaces after digit? 
*/ + + if (UNLIKELY(!IS_NUM(ch))) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + parser->http_minor *= 10; + parser->http_minor += ch - '0'; + + if (UNLIKELY(parser->http_minor > 999)) { + SET_ERRNO(HPE_INVALID_VERSION); + goto error; + } + + break; + } + + /* end of request line */ + case s_req_line_almost_done: + { + if (UNLIKELY(ch != LF)) { + SET_ERRNO(HPE_LF_EXPECTED); + goto error; + } + + UPDATE_STATE(s_header_field_start); + break; + } + + case s_header_field_start: + { + if (ch == CR) { + UPDATE_STATE(s_headers_almost_done); + break; + } + + if (ch == LF) { + /* they might be just sending \n instead of \r\n so this would be + * the second \n to denote the end of headers*/ + UPDATE_STATE(s_headers_almost_done); + REEXECUTE(); + } + + c = TOKEN(ch); + + if (UNLIKELY(!c)) { + SET_ERRNO(HPE_INVALID_HEADER_TOKEN); + goto error; + } + + MARK(header_field); + + parser->index = 0; + UPDATE_STATE(s_header_field); + + switch (c) { + case 'c': + parser->header_state = h_C; + break; + + case 'p': + parser->header_state = h_matching_proxy_connection; + break; + + case 't': + parser->header_state = h_matching_transfer_encoding; + break; + + case 'u': + parser->header_state = h_matching_upgrade; + break; + + default: + parser->header_state = h_general; + break; + } + break; + } + + case s_header_field: + { + const char* start = p; + for (; p != data + len; p++) { + ch = *p; + c = TOKEN(ch); + + if (!c) + break; + + switch (parser->header_state) { + case h_general: + break; + + case h_C: + parser->index++; + parser->header_state = (c == 'o' ? h_CO : h_general); + break; + + case h_CO: + parser->index++; + parser->header_state = (c == 'n' ? 
h_CON : h_general); + break; + + case h_CON: + parser->index++; + switch (c) { + case 'n': + parser->header_state = h_matching_connection; + break; + case 't': + parser->header_state = h_matching_content_length; + break; + default: + parser->header_state = h_general; + break; + } + break; + + /* connection */ + + case h_matching_connection: + parser->index++; + if (parser->index > sizeof(CONNECTION)-1 + || c != CONNECTION[parser->index]) { + parser->header_state = h_general; + } else if (parser->index == sizeof(CONNECTION)-2) { + parser->header_state = h_connection; + } + break; + + /* proxy-connection */ + + case h_matching_proxy_connection: + parser->index++; + if (parser->index > sizeof(PROXY_CONNECTION)-1 + || c != PROXY_CONNECTION[parser->index]) { + parser->header_state = h_general; + } else if (parser->index == sizeof(PROXY_CONNECTION)-2) { + parser->header_state = h_connection; + } + break; + + /* content-length */ + + case h_matching_content_length: + parser->index++; + if (parser->index > sizeof(CONTENT_LENGTH)-1 + || c != CONTENT_LENGTH[parser->index]) { + parser->header_state = h_general; + } else if (parser->index == sizeof(CONTENT_LENGTH)-2) { + parser->header_state = h_content_length; + } + break; + + /* transfer-encoding */ + + case h_matching_transfer_encoding: + parser->index++; + if (parser->index > sizeof(TRANSFER_ENCODING)-1 + || c != TRANSFER_ENCODING[parser->index]) { + parser->header_state = h_general; + } else if (parser->index == sizeof(TRANSFER_ENCODING)-2) { + parser->header_state = h_transfer_encoding; + } + break; + + /* upgrade */ + + case h_matching_upgrade: + parser->index++; + if (parser->index > sizeof(UPGRADE)-1 + || c != UPGRADE[parser->index]) { + parser->header_state = h_general; + } else if (parser->index == sizeof(UPGRADE)-2) { + parser->header_state = h_upgrade; + } + break; + + case h_connection: + case h_content_length: + case h_transfer_encoding: + case h_upgrade: + if (ch != ' ') parser->header_state = h_general; + 
break; + + default: + assert(0 && "Unknown header_state"); + break; + } + } + + COUNT_HEADER_SIZE(p - start); + + if (p == data + len) { + --p; + break; + } + + if (ch == ':') { + UPDATE_STATE(s_header_value_discard_ws); + CALLBACK_DATA(header_field); + break; + } + + SET_ERRNO(HPE_INVALID_HEADER_TOKEN); + goto error; + } + + case s_header_value_discard_ws: + if (ch == ' ' || ch == '\t') break; + + if (ch == CR) { + UPDATE_STATE(s_header_value_discard_ws_almost_done); + break; + } + + if (ch == LF) { + UPDATE_STATE(s_header_value_discard_lws); + break; + } + + /* FALLTHROUGH */ + + case s_header_value_start: + { + MARK(header_value); + + UPDATE_STATE(s_header_value); + parser->index = 0; + + c = LOWER(ch); + + switch (parser->header_state) { + case h_upgrade: + parser->flags |= F_UPGRADE; + parser->header_state = h_general; + break; + + case h_transfer_encoding: + /* looking for 'Transfer-Encoding: chunked' */ + if ('c' == c) { + parser->header_state = h_matching_transfer_encoding_chunked; + } else { + parser->header_state = h_general; + } + break; + + case h_content_length: + if (UNLIKELY(!IS_NUM(ch))) { + SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); + goto error; + } + + parser->content_length = ch - '0'; + break; + + case h_connection: + /* looking for 'Connection: keep-alive' */ + if (c == 'k') { + parser->header_state = h_matching_connection_keep_alive; + /* looking for 'Connection: close' */ + } else if (c == 'c') { + parser->header_state = h_matching_connection_close; + } else if (c == 'u') { + parser->header_state = h_matching_connection_upgrade; + } else { + parser->header_state = h_matching_connection_token; + } + break; + + /* Multi-value `Connection` header */ + case h_matching_connection_token_start: + break; + + default: + parser->header_state = h_general; + break; + } + break; + } + + case s_header_value: + { + const char* start = p; + enum header_states h_state = (enum header_states) parser->header_state; + for (; p != data + len; p++) { + ch = *p; + if 
(ch == CR) { + UPDATE_STATE(s_header_almost_done); + parser->header_state = h_state; + CALLBACK_DATA(header_value); + break; + } + + if (ch == LF) { + UPDATE_STATE(s_header_almost_done); + COUNT_HEADER_SIZE(p - start); + parser->header_state = h_state; + CALLBACK_DATA_NOADVANCE(header_value); + REEXECUTE(); + } + + c = LOWER(ch); + + switch (h_state) { + case h_general: + { + const char* p_cr; + const char* p_lf; + size_t limit = data + len - p; + + limit = MIN(limit, HTTP_MAX_HEADER_SIZE); + + p_cr = (const char*) memchr(p, CR, limit); + p_lf = (const char*) memchr(p, LF, limit); + if (p_cr != NULL) { + if (p_lf != NULL && p_cr >= p_lf) + p = p_lf; + else + p = p_cr; + } else if (UNLIKELY(p_lf != NULL)) { + p = p_lf; + } else { + p = data + len; + } + --p; + + break; + } + + case h_connection: + case h_transfer_encoding: + assert(0 && "Shouldn't get here."); + break; + + case h_content_length: + { + uint64_t t; + + if (ch == ' ') break; + + if (UNLIKELY(!IS_NUM(ch))) { + SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); + parser->header_state = h_state; + goto error; + } + + t = parser->content_length; + t *= 10; + t += ch - '0'; + + /* Overflow? Test against a conservative limit for simplicity. 
*/ + if (UNLIKELY((ULLONG_MAX - 10) / 10 < parser->content_length)) { + SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); + parser->header_state = h_state; + goto error; + } + + parser->content_length = t; + break; + } + + /* Transfer-Encoding: chunked */ + case h_matching_transfer_encoding_chunked: + parser->index++; + if (parser->index > sizeof(CHUNKED)-1 + || c != CHUNKED[parser->index]) { + h_state = h_general; + } else if (parser->index == sizeof(CHUNKED)-2) { + h_state = h_transfer_encoding_chunked; + } + break; + + case h_matching_connection_token_start: + /* looking for 'Connection: keep-alive' */ + if (c == 'k') { + h_state = h_matching_connection_keep_alive; + /* looking for 'Connection: close' */ + } else if (c == 'c') { + h_state = h_matching_connection_close; + } else if (c == 'u') { + h_state = h_matching_connection_upgrade; + } else if (STRICT_TOKEN(c)) { + h_state = h_matching_connection_token; + } else if (c == ' ' || c == '\t') { + /* Skip lws */ + } else { + h_state = h_general; + } + break; + + /* looking for 'Connection: keep-alive' */ + case h_matching_connection_keep_alive: + parser->index++; + if (parser->index > sizeof(KEEP_ALIVE)-1 + || c != KEEP_ALIVE[parser->index]) { + h_state = h_matching_connection_token; + } else if (parser->index == sizeof(KEEP_ALIVE)-2) { + h_state = h_connection_keep_alive; + } + break; + + /* looking for 'Connection: close' */ + case h_matching_connection_close: + parser->index++; + if (parser->index > sizeof(CLOSE)-1 || c != CLOSE[parser->index]) { + h_state = h_matching_connection_token; + } else if (parser->index == sizeof(CLOSE)-2) { + h_state = h_connection_close; + } + break; + + /* looking for 'Connection: upgrade' */ + case h_matching_connection_upgrade: + parser->index++; + if (parser->index > sizeof(UPGRADE) - 1 || + c != UPGRADE[parser->index]) { + h_state = h_matching_connection_token; + } else if (parser->index == sizeof(UPGRADE)-2) { + h_state = h_connection_upgrade; + } + break; + + case 
h_matching_connection_token: + if (ch == ',') { + h_state = h_matching_connection_token_start; + parser->index = 0; + } + break; + + case h_transfer_encoding_chunked: + if (ch != ' ') h_state = h_general; + break; + + case h_connection_keep_alive: + case h_connection_close: + case h_connection_upgrade: + if (ch == ',') { + if (h_state == h_connection_keep_alive) { + parser->flags |= F_CONNECTION_KEEP_ALIVE; + } else if (h_state == h_connection_close) { + parser->flags |= F_CONNECTION_CLOSE; + } else if (h_state == h_connection_upgrade) { + parser->flags |= F_CONNECTION_UPGRADE; + } + h_state = h_matching_connection_token_start; + parser->index = 0; + } else if (ch != ' ') { + h_state = h_matching_connection_token; + } + break; + + default: + UPDATE_STATE(s_header_value); + h_state = h_general; + break; + } + } + parser->header_state = h_state; + + COUNT_HEADER_SIZE(p - start); + + if (p == data + len) + --p; + break; + } + + case s_header_almost_done: + { + STRICT_CHECK(ch != LF); + + UPDATE_STATE(s_header_value_lws); + break; + } + + case s_header_value_lws: + { + if (ch == ' ' || ch == '\t') { + UPDATE_STATE(s_header_value_start); + REEXECUTE(); + } + + /* finished the header */ + switch (parser->header_state) { + case h_connection_keep_alive: + parser->flags |= F_CONNECTION_KEEP_ALIVE; + break; + case h_connection_close: + parser->flags |= F_CONNECTION_CLOSE; + break; + case h_transfer_encoding_chunked: + parser->flags |= F_CHUNKED; + break; + case h_connection_upgrade: + parser->flags |= F_CONNECTION_UPGRADE; + break; + default: + break; + } + + UPDATE_STATE(s_header_field_start); + REEXECUTE(); + } + + case s_header_value_discard_ws_almost_done: + { + STRICT_CHECK(ch != LF); + UPDATE_STATE(s_header_value_discard_lws); + break; + } + + case s_header_value_discard_lws: + { + if (ch == ' ' || ch == '\t') { + UPDATE_STATE(s_header_value_discard_ws); + break; + } else { + switch (parser->header_state) { + case h_connection_keep_alive: + parser->flags |= 
F_CONNECTION_KEEP_ALIVE; + break; + case h_connection_close: + parser->flags |= F_CONNECTION_CLOSE; + break; + case h_connection_upgrade: + parser->flags |= F_CONNECTION_UPGRADE; + break; + case h_transfer_encoding_chunked: + parser->flags |= F_CHUNKED; + break; + default: + break; + } + + /* header value was empty */ + MARK(header_value); + UPDATE_STATE(s_header_field_start); + CALLBACK_DATA_NOADVANCE(header_value); + REEXECUTE(); + } + } + + case s_headers_almost_done: + { + STRICT_CHECK(ch != LF); + + if (parser->flags & F_TRAILING) { + /* End of a chunked request */ + UPDATE_STATE(s_message_done); + CALLBACK_NOTIFY_NOADVANCE(chunk_complete); + REEXECUTE(); + } + + UPDATE_STATE(s_headers_done); + + /* Set this here so that on_headers_complete() callbacks can see it */ + parser->upgrade = + ((parser->flags & (F_UPGRADE | F_CONNECTION_UPGRADE)) == + (F_UPGRADE | F_CONNECTION_UPGRADE) || + parser->method == HTTP_CONNECT); + + /* Here we call the headers_complete callback. This is somewhat + * different than other callbacks because if the user returns 1, we + * will interpret that as saying that this message has no body. This + * is needed for the annoying case of recieving a response to a HEAD + * request. + * + * We'd like to use CALLBACK_NOTIFY_NOADVANCE() here but we cannot, so + * we have to simulate it by handling a change in errno below. 
+ */ + if (settings->on_headers_complete) { + switch (settings->on_headers_complete(parser)) { + case 0: + break; + + case 1: + parser->flags |= F_SKIPBODY; + break; + + default: + SET_ERRNO(HPE_CB_headers_complete); + RETURN(p - data); /* Error */ + } + } + + if (HTTP_PARSER_ERRNO(parser) != HPE_OK) { + RETURN(p - data); + } + + REEXECUTE(); + } + + case s_headers_done: + { + STRICT_CHECK(ch != LF); + + parser->nread = 0; + + int hasBody = parser->flags & F_CHUNKED || + (parser->content_length > 0 && parser->content_length != ULLONG_MAX); + if (parser->upgrade && (parser->method == HTTP_CONNECT || + (parser->flags & F_SKIPBODY) || !hasBody)) { + /* Exit, the rest of the message is in a different protocol. */ + UPDATE_STATE(NEW_MESSAGE()); + CALLBACK_NOTIFY(message_complete); + RETURN((p - data) + 1); + } + + if (parser->flags & F_SKIPBODY) { + UPDATE_STATE(NEW_MESSAGE()); + CALLBACK_NOTIFY(message_complete); + } else if (parser->flags & F_CHUNKED) { + /* chunked encoding - ignore Content-Length header */ + UPDATE_STATE(s_chunk_size_start); + } else { + if (parser->content_length == 0) { + /* Content-Length header given but zero: Content-Length: 0\r\n */ + UPDATE_STATE(NEW_MESSAGE()); + CALLBACK_NOTIFY(message_complete); + } else if (parser->content_length != ULLONG_MAX) { + /* Content-Length header given and non-zero */ + UPDATE_STATE(s_body_identity); + } else { + if (parser->type == HTTP_REQUEST || + !http_message_needs_eof(parser)) { + /* Assume content-length 0 - read the next */ + UPDATE_STATE(NEW_MESSAGE()); + CALLBACK_NOTIFY(message_complete); + } else { + /* Read body until EOF */ + UPDATE_STATE(s_body_identity_eof); + } + } + } + + break; + } + + case s_body_identity: + { + uint64_t to_read = MIN(parser->content_length, + (uint64_t) ((data + len) - p)); + + assert(parser->content_length != 0 + && parser->content_length != ULLONG_MAX); + + /* The difference between advancing content_length and p is because + * the latter will automaticaly advance on the 
next loop iteration. + * Further, if content_length ends up at 0, we want to see the last + * byte again for our message complete callback. + */ + MARK(body); + parser->content_length -= to_read; + p += to_read - 1; + + if (parser->content_length == 0) { + UPDATE_STATE(s_message_done); + + /* Mimic CALLBACK_DATA_NOADVANCE() but with one extra byte. + * + * The alternative to doing this is to wait for the next byte to + * trigger the data callback, just as in every other case. The + * problem with this is that this makes it difficult for the test + * harness to distinguish between complete-on-EOF and + * complete-on-length. It's not clear that this distinction is + * important for applications, but let's keep it for now. + */ + CALLBACK_DATA_(body, p - body_mark + 1, p - data); + REEXECUTE(); + } + + break; + } + + /* read until EOF */ + case s_body_identity_eof: + MARK(body); + p = data + len - 1; + + break; + + case s_message_done: + UPDATE_STATE(NEW_MESSAGE()); + CALLBACK_NOTIFY(message_complete); + if (parser->upgrade) { + /* Exit, the rest of the message is in a different protocol. */ + RETURN((p - data) + 1); + } + break; + + case s_chunk_size_start: + { + assert(parser->nread == 1); + assert(parser->flags & F_CHUNKED); + + unhex_val = unhex[(unsigned char)ch]; + if (UNLIKELY(unhex_val == -1)) { + SET_ERRNO(HPE_INVALID_CHUNK_SIZE); + goto error; + } + + parser->content_length = unhex_val; + UPDATE_STATE(s_chunk_size); + break; + } + + case s_chunk_size: + { + uint64_t t; + + assert(parser->flags & F_CHUNKED); + + if (ch == CR) { + UPDATE_STATE(s_chunk_size_almost_done); + break; + } + + unhex_val = unhex[(unsigned char)ch]; + + if (unhex_val == -1) { + if (ch == ';' || ch == ' ') { + UPDATE_STATE(s_chunk_parameters); + break; + } + + SET_ERRNO(HPE_INVALID_CHUNK_SIZE); + goto error; + } + + t = parser->content_length; + t *= 16; + t += unhex_val; + + /* Overflow? Test against a conservative limit for simplicity. 
*/ + if (UNLIKELY((ULLONG_MAX - 16) / 16 < parser->content_length)) { + SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); + goto error; + } + + parser->content_length = t; + break; + } + + case s_chunk_parameters: + { + assert(parser->flags & F_CHUNKED); + /* just ignore this shit. TODO check for overflow */ + if (ch == CR) { + UPDATE_STATE(s_chunk_size_almost_done); + break; + } + break; + } + + case s_chunk_size_almost_done: + { + assert(parser->flags & F_CHUNKED); + STRICT_CHECK(ch != LF); + + parser->nread = 0; + + if (parser->content_length == 0) { + parser->flags |= F_TRAILING; + UPDATE_STATE(s_header_field_start); + } else { + UPDATE_STATE(s_chunk_data); + } + CALLBACK_NOTIFY(chunk_header); + break; + } + + case s_chunk_data: + { + uint64_t to_read = MIN(parser->content_length, + (uint64_t) ((data + len) - p)); + + assert(parser->flags & F_CHUNKED); + assert(parser->content_length != 0 + && parser->content_length != ULLONG_MAX); + + /* See the explanation in s_body_identity for why the content + * length and data pointers are managed this way. + */ + MARK(body); + parser->content_length -= to_read; + p += to_read - 1; + + if (parser->content_length == 0) { + UPDATE_STATE(s_chunk_data_almost_done); + } + + break; + } + + case s_chunk_data_almost_done: + assert(parser->flags & F_CHUNKED); + assert(parser->content_length == 0); + STRICT_CHECK(ch != CR); + UPDATE_STATE(s_chunk_data_done); + CALLBACK_DATA(body); + break; + + case s_chunk_data_done: + assert(parser->flags & F_CHUNKED); + STRICT_CHECK(ch != LF); + parser->nread = 0; + UPDATE_STATE(s_chunk_size_start); + CALLBACK_NOTIFY(chunk_complete); + break; + + default: + assert(0 && "unhandled state"); + SET_ERRNO(HPE_INVALID_INTERNAL_STATE); + goto error; + } + } + + /* Run callbacks for any marks that we have leftover after we ran our of + * bytes. There should be at most one of these set, so it's OK to invoke + * them in series (unset marks will not result in callbacks). 
+ * + * We use the NOADVANCE() variety of callbacks here because 'p' has already + * overflowed 'data' and this allows us to correct for the off-by-one that + * we'd otherwise have (since CALLBACK_DATA() is meant to be run with a 'p' + * value that's in-bounds). + */ + + assert(((header_field_mark ? 1 : 0) + + (header_value_mark ? 1 : 0) + + (url_mark ? 1 : 0) + + (body_mark ? 1 : 0) + + (status_mark ? 1 : 0)) <= 1); + + CALLBACK_DATA_NOADVANCE(header_field); + CALLBACK_DATA_NOADVANCE(header_value); + CALLBACK_DATA_NOADVANCE(url); + CALLBACK_DATA_NOADVANCE(body); + CALLBACK_DATA_NOADVANCE(status); + + RETURN(len); + +error: + if (HTTP_PARSER_ERRNO(parser) == HPE_OK) { + SET_ERRNO(HPE_UNKNOWN); + } + + RETURN(p - data); +} + + +/* Does the parser need to see an EOF to find the end of the message? */ +int +http_message_needs_eof (const http_parser *parser) +{ + if (parser->type == HTTP_REQUEST) { + return 0; + } + + /* See RFC 2616 section 4.4 */ + if (parser->status_code / 100 == 1 || /* 1xx e.g. 
Continue */ + parser->status_code == 204 || /* No Content */ + parser->status_code == 304 || /* Not Modified */ + parser->flags & F_SKIPBODY) { /* response to a HEAD request */ + return 0; + } + + if ((parser->flags & F_CHUNKED) || parser->content_length != ULLONG_MAX) { + return 0; + } + + return 1; +} + + +int +http_should_keep_alive (const http_parser *parser) +{ + if (parser->http_major > 0 && parser->http_minor > 0) { + /* HTTP/1.1 */ + if (parser->flags & F_CONNECTION_CLOSE) { + return 0; + } + } else { + /* HTTP/1.0 or earlier */ + if (!(parser->flags & F_CONNECTION_KEEP_ALIVE)) { + return 0; + } + } + + return !http_message_needs_eof(parser); +} + + +const char * +http_method_str (enum http_method m) +{ + return ELEM_AT(method_strings, m, ""); +} + + +void +http_parser_init (http_parser *parser, enum http_parser_type t) +{ + void *data = parser->data; /* preserve application data */ + memset(parser, 0, sizeof(*parser)); + parser->data = data; + parser->type = t; + parser->state = (t == HTTP_REQUEST ? s_start_req : (t == HTTP_RESPONSE ? 
s_start_res : s_start_req_or_res)); + parser->http_errno = HPE_OK; +} + +void +http_parser_settings_init(http_parser_settings *settings) +{ + memset(settings, 0, sizeof(*settings)); +} + +const char * +http_errno_name(enum http_errno err) { + assert(((size_t) err) < + (sizeof(http_strerror_tab) / sizeof(http_strerror_tab[0]))); + return http_strerror_tab[err].name; +} + +const char * +http_errno_description(enum http_errno err) { + assert(((size_t) err) < + (sizeof(http_strerror_tab) / sizeof(http_strerror_tab[0]))); + return http_strerror_tab[err].description; +} + +static enum http_host_state +http_parse_host_char(enum http_host_state s, const char ch) { + switch(s) { + case s_http_userinfo: + case s_http_userinfo_start: + if (ch == '@') { + return s_http_host_start; + } + + if (IS_USERINFO_CHAR(ch)) { + return s_http_userinfo; + } + break; + + case s_http_host_start: + if (ch == '[') { + return s_http_host_v6_start; + } + + if (IS_HOST_CHAR(ch)) { + return s_http_host; + } + + break; + + case s_http_host: + if (IS_HOST_CHAR(ch)) { + return s_http_host; + } + + /* FALLTHROUGH */ + case s_http_host_v6_end: + if (ch == ':') { + return s_http_host_port_start; + } + + break; + + case s_http_host_v6: + if (ch == ']') { + return s_http_host_v6_end; + } + + /* FALLTHROUGH */ + case s_http_host_v6_start: + if (IS_HEX(ch) || ch == ':' || ch == '.') { + return s_http_host_v6; + } + + break; + + case s_http_host_port: + case s_http_host_port_start: + if (IS_NUM(ch)) { + return s_http_host_port; + } + + break; + + default: + break; + } + return s_http_host_dead; +} + +static int +http_parse_host(const char * buf, struct http_parser_url *u, int found_at) { + enum http_host_state s; + + const char *p; + size_t buflen = u->field_data[UF_HOST].off + u->field_data[UF_HOST].len; + + u->field_data[UF_HOST].len = 0; + + s = found_at ? 
s_http_userinfo_start : s_http_host_start; + + for (p = buf + u->field_data[UF_HOST].off; p < buf + buflen; p++) { + enum http_host_state new_s = http_parse_host_char(s, *p); + + if (new_s == s_http_host_dead) { + return 1; + } + + switch(new_s) { + case s_http_host: + if (s != s_http_host) { + u->field_data[UF_HOST].off = p - buf; + } + u->field_data[UF_HOST].len++; + break; + + case s_http_host_v6: + if (s != s_http_host_v6) { + u->field_data[UF_HOST].off = p - buf; + } + u->field_data[UF_HOST].len++; + break; + + case s_http_host_port: + if (s != s_http_host_port) { + u->field_data[UF_PORT].off = p - buf; + u->field_data[UF_PORT].len = 0; + u->field_set |= (1 << UF_PORT); + } + u->field_data[UF_PORT].len++; + break; + + case s_http_userinfo: + if (s != s_http_userinfo) { + u->field_data[UF_USERINFO].off = p - buf ; + u->field_data[UF_USERINFO].len = 0; + u->field_set |= (1 << UF_USERINFO); + } + u->field_data[UF_USERINFO].len++; + break; + + default: + break; + } + s = new_s; + } + + /* Make sure we don't end somewhere unexpected */ + switch (s) { + case s_http_host_start: + case s_http_host_v6_start: + case s_http_host_v6: + case s_http_host_port_start: + case s_http_userinfo: + case s_http_userinfo_start: + return 1; + default: + break; + } + + return 0; +} + +int +http_parser_parse_url(const char *buf, size_t buflen, int is_connect, + struct http_parser_url *u) +{ + enum state s; + const char *p; + enum http_parser_url_fields uf, old_uf; + int found_at = 0; + + u->port = u->field_set = 0; + s = is_connect ? 
s_req_server_start : s_req_spaces_before_url; + old_uf = UF_MAX; + + for (p = buf; p < buf + buflen; p++) { + s = parse_url_char(s, *p); + + /* Figure out the next field that we're operating on */ + switch (s) { + case s_dead: + return 1; + + /* Skip delimeters */ + case s_req_schema_slash: + case s_req_schema_slash_slash: + case s_req_server_start: + case s_req_query_string_start: + case s_req_fragment_start: + continue; + + case s_req_schema: + uf = UF_SCHEMA; + break; + + case s_req_server_with_at: + found_at = 1; + + /* FALLTROUGH */ + case s_req_server: + uf = UF_HOST; + break; + + case s_req_path: + uf = UF_PATH; + break; + + case s_req_query_string: + uf = UF_QUERY; + break; + + case s_req_fragment: + uf = UF_FRAGMENT; + break; + + default: + assert(!"Unexpected state"); + return 1; + } + + /* Nothing's changed; soldier on */ + if (uf == old_uf) { + u->field_data[uf].len++; + continue; + } + + u->field_data[uf].off = p - buf; + u->field_data[uf].len = 1; + + u->field_set |= (1 << uf); + old_uf = uf; + } + + /* host must be present if there is a schema */ + /* parsing http:///toto will fail */ + if ((u->field_set & ((1 << UF_SCHEMA) | (1 << UF_HOST))) != 0) { + if (http_parse_host(buf, u, found_at) != 0) { + return 1; + } + } + + /* CONNECT requests can only contain "hostname:port" */ + if (is_connect && u->field_set != ((1 << UF_HOST)|(1 << UF_PORT))) { + return 1; + } + + if (u->field_set & (1 << UF_PORT)) { + /* Don't bother with endp; we've already validated the string */ + unsigned long v = strtoul(buf + u->field_data[UF_PORT].off, NULL, 10); + + /* Ports have a max value of 2^16 */ + if (v > 0xffff) { + return 1; + } + + u->port = (uint16_t) v; + } + + return 0; +} + +void +http_parser_pause(http_parser *parser, int paused) { + /* Users should only be pausing/unpausing a parser that is not in an error + * state. In non-debug builds, there's not much that we can do about this + * other than ignore it. 
+ */ + if (HTTP_PARSER_ERRNO(parser) == HPE_OK || + HTTP_PARSER_ERRNO(parser) == HPE_PAUSED) { + SET_ERRNO((paused) ? HPE_PAUSED : HPE_OK); + } else { + assert(0 && "Attempting to pause parser in error state"); + } +} + +int +http_body_is_final(const struct http_parser *parser) { + return parser->state == s_message_done; +} + +unsigned long +http_parser_version(void) { + return HTTP_PARSER_VERSION_MAJOR * 0x10000 | + HTTP_PARSER_VERSION_MINOR * 0x00100 | + HTTP_PARSER_VERSION_PATCH * 0x00001; +} diff --git a/vendor/http_parser/http_parser.gyp b/vendor/http_parser/http_parser.gyp new file mode 100644 index 000000000..ef34ecaea --- /dev/null +++ b/vendor/http_parser/http_parser.gyp @@ -0,0 +1,111 @@ +# This file is used with the GYP meta build system. +# http://code.google.com/p/gyp/ +# To build try this: +# svn co http://gyp.googlecode.com/svn/trunk gyp +# ./gyp/gyp -f make --depth=`pwd` http_parser.gyp +# ./out/Debug/test +{ + 'target_defaults': { + 'default_configuration': 'Debug', + 'configurations': { + # TODO: hoist these out and put them somewhere common, because + # RuntimeLibrary MUST MATCH across the entire project + 'Debug': { + 'defines': [ 'DEBUG', '_DEBUG' ], + 'cflags': [ '-Wall', '-Wextra', '-O0', '-g', '-ftrapv' ], + 'msvs_settings': { + 'VCCLCompilerTool': { + 'RuntimeLibrary': 1, # static debug + }, + }, + }, + 'Release': { + 'defines': [ 'NDEBUG' ], + 'cflags': [ '-Wall', '-Wextra', '-O3' ], + 'msvs_settings': { + 'VCCLCompilerTool': { + 'RuntimeLibrary': 0, # static release + }, + }, + } + }, + 'msvs_settings': { + 'VCCLCompilerTool': { + }, + 'VCLibrarianTool': { + }, + 'VCLinkerTool': { + 'GenerateDebugInformation': 'true', + }, + }, + 'conditions': [ + ['OS == "win"', { + 'defines': [ + 'WIN32' + ], + }] + ], + }, + + 'targets': [ + { + 'target_name': 'http_parser', + 'type': 'static_library', + 'include_dirs': [ '.' ], + 'direct_dependent_settings': { + 'defines': [ 'HTTP_PARSER_STRICT=0' ], + 'include_dirs': [ '.' 
], + }, + 'defines': [ 'HTTP_PARSER_STRICT=0' ], + 'sources': [ './http_parser.c', ], + 'conditions': [ + ['OS=="win"', { + 'msvs_settings': { + 'VCCLCompilerTool': { + # Compile as C++. http_parser.c is actually C99, but C++ is + # close enough in this case. + 'CompileAs': 2, + }, + }, + }] + ], + }, + + { + 'target_name': 'http_parser_strict', + 'type': 'static_library', + 'include_dirs': [ '.' ], + 'direct_dependent_settings': { + 'defines': [ 'HTTP_PARSER_STRICT=1' ], + 'include_dirs': [ '.' ], + }, + 'defines': [ 'HTTP_PARSER_STRICT=1' ], + 'sources': [ './http_parser.c', ], + 'conditions': [ + ['OS=="win"', { + 'msvs_settings': { + 'VCCLCompilerTool': { + # Compile as C++. http_parser.c is actually C99, but C++ is + # close enough in this case. + 'CompileAs': 2, + }, + }, + }] + ], + }, + + { + 'target_name': 'test-nonstrict', + 'type': 'executable', + 'dependencies': [ 'http_parser' ], + 'sources': [ 'test.c' ] + }, + + { + 'target_name': 'test-strict', + 'type': 'executable', + 'dependencies': [ 'http_parser_strict' ], + 'sources': [ 'test.c' ] + } + ] +} diff --git a/vendor/http_parser/http_parser.h b/vendor/http_parser/http_parser.h new file mode 100644 index 000000000..eb71bf992 --- /dev/null +++ b/vendor/http_parser/http_parser.h @@ -0,0 +1,342 @@ +/* Copyright Joyent, Inc. and other Node contributors. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ +#ifndef http_parser_h +#define http_parser_h +#ifdef __cplusplus +extern "C" { +#endif + +/* Also update SONAME in the Makefile whenever you change these. */ +#define HTTP_PARSER_VERSION_MAJOR 2 +#define HTTP_PARSER_VERSION_MINOR 5 +#define HTTP_PARSER_VERSION_PATCH 0 + +#include +#if defined(_WIN32) && !defined(__MINGW32__) && (!defined(_MSC_VER) || _MSC_VER<1600) +#include +#include +typedef __int8 int8_t; +typedef unsigned __int8 uint8_t; +typedef __int16 int16_t; +typedef unsigned __int16 uint16_t; +typedef __int32 int32_t; +typedef unsigned __int32 uint32_t; +typedef __int64 int64_t; +typedef unsigned __int64 uint64_t; +#else +#include +#endif + +/* Compile with -DHTTP_PARSER_STRICT=0 to make less checks, but run + * faster + */ +#ifndef HTTP_PARSER_STRICT +# define HTTP_PARSER_STRICT 1 +#endif + +/* Maximium header size allowed. If the macro is not defined + * before including this header then the default is used. To + * change the maximum header size, define the macro in the build + * environment (e.g. -DHTTP_MAX_HEADER_SIZE=). To remove + * the effective limit on the size of the header, define the macro + * to a very large number (e.g. -DHTTP_MAX_HEADER_SIZE=0x7fffffff) + */ +#ifndef HTTP_MAX_HEADER_SIZE +# define HTTP_MAX_HEADER_SIZE (80*1024) +#endif + +typedef struct http_parser http_parser; +typedef struct http_parser_settings http_parser_settings; + + +/* Callbacks should return non-zero to indicate an error. The parser will + * then halt execution. 
+ * + * The one exception is on_headers_complete. In a HTTP_RESPONSE parser + * returning '1' from on_headers_complete will tell the parser that it + * should not expect a body. This is used when receiving a response to a + * HEAD request which may contain 'Content-Length' or 'Transfer-Encoding: + * chunked' headers that indicate the presence of a body. + * + * http_data_cb does not return data chunks. It will be called arbitrarily + * many times for each string. E.G. you might get 10 callbacks for "on_url" + * each providing just a few characters more data. + */ +typedef int (*http_data_cb) (http_parser*, const char *at, size_t length); +typedef int (*http_cb) (http_parser*); + + +/* Request Methods */ +#define HTTP_METHOD_MAP(XX) \ + XX(0, DELETE, DELETE) \ + XX(1, GET, GET) \ + XX(2, HEAD, HEAD) \ + XX(3, POST, POST) \ + XX(4, PUT, PUT) \ + /* pathological */ \ + XX(5, CONNECT, CONNECT) \ + XX(6, OPTIONS, OPTIONS) \ + XX(7, TRACE, TRACE) \ + /* webdav */ \ + XX(8, COPY, COPY) \ + XX(9, LOCK, LOCK) \ + XX(10, MKCOL, MKCOL) \ + XX(11, MOVE, MOVE) \ + XX(12, PROPFIND, PROPFIND) \ + XX(13, PROPPATCH, PROPPATCH) \ + XX(14, SEARCH, SEARCH) \ + XX(15, UNLOCK, UNLOCK) \ + /* subversion */ \ + XX(16, REPORT, REPORT) \ + XX(17, MKACTIVITY, MKACTIVITY) \ + XX(18, CHECKOUT, CHECKOUT) \ + XX(19, MERGE, MERGE) \ + /* upnp */ \ + XX(20, MSEARCH, M-SEARCH) \ + XX(21, NOTIFY, NOTIFY) \ + XX(22, SUBSCRIBE, SUBSCRIBE) \ + XX(23, UNSUBSCRIBE, UNSUBSCRIBE) \ + /* RFC-5789 */ \ + XX(24, PATCH, PATCH) \ + XX(25, PURGE, PURGE) \ + /* CalDAV */ \ + XX(26, MKCALENDAR, MKCALENDAR) \ + +enum http_method + { +#define XX(num, name, string) HTTP_##name = num, + HTTP_METHOD_MAP(XX) +#undef XX + }; + + +enum http_parser_type { HTTP_REQUEST, HTTP_RESPONSE, HTTP_BOTH }; + + +/* Flag values for http_parser.flags field */ +enum flags + { F_CHUNKED = 1 << 0 + , F_CONNECTION_KEEP_ALIVE = 1 << 1 + , F_CONNECTION_CLOSE = 1 << 2 + , F_CONNECTION_UPGRADE = 1 << 3 + , F_TRAILING = 1 << 4 + , F_UPGRADE = 1 
<< 5 + , F_SKIPBODY = 1 << 6 + }; + + +/* Map for errno-related constants + * + * The provided argument should be a macro that takes 2 arguments. + */ +#define HTTP_ERRNO_MAP(XX) \ + /* No error */ \ + XX(OK, "success") \ + \ + /* Callback-related errors */ \ + XX(CB_message_begin, "the on_message_begin callback failed") \ + XX(CB_url, "the on_url callback failed") \ + XX(CB_header_field, "the on_header_field callback failed") \ + XX(CB_header_value, "the on_header_value callback failed") \ + XX(CB_headers_complete, "the on_headers_complete callback failed") \ + XX(CB_body, "the on_body callback failed") \ + XX(CB_message_complete, "the on_message_complete callback failed") \ + XX(CB_status, "the on_status callback failed") \ + XX(CB_chunk_header, "the on_chunk_header callback failed") \ + XX(CB_chunk_complete, "the on_chunk_complete callback failed") \ + \ + /* Parsing-related errors */ \ + XX(INVALID_EOF_STATE, "stream ended at an unexpected time") \ + XX(HEADER_OVERFLOW, \ + "too many header bytes seen; overflow detected") \ + XX(CLOSED_CONNECTION, \ + "data received after completed connection: close message") \ + XX(INVALID_VERSION, "invalid HTTP version") \ + XX(INVALID_STATUS, "invalid HTTP status code") \ + XX(INVALID_METHOD, "invalid HTTP method") \ + XX(INVALID_URL, "invalid URL") \ + XX(INVALID_HOST, "invalid host") \ + XX(INVALID_PORT, "invalid port") \ + XX(INVALID_PATH, "invalid path") \ + XX(INVALID_QUERY_STRING, "invalid query string") \ + XX(INVALID_FRAGMENT, "invalid fragment") \ + XX(LF_EXPECTED, "LF character expected") \ + XX(INVALID_HEADER_TOKEN, "invalid character in header") \ + XX(INVALID_CONTENT_LENGTH, \ + "invalid character in content-length header") \ + XX(INVALID_CHUNK_SIZE, \ + "invalid character in chunk size header") \ + XX(INVALID_CONSTANT, "invalid constant string") \ + XX(INVALID_INTERNAL_STATE, "encountered unexpected internal state")\ + XX(STRICT, "strict mode assertion failed") \ + XX(PAUSED, "parser is paused") \ + XX(UNKNOWN, 
"an unknown error occurred") + + +/* Define HPE_* values for each errno value above */ +#define HTTP_ERRNO_GEN(n, s) HPE_##n, +enum http_errno { + HTTP_ERRNO_MAP(HTTP_ERRNO_GEN) +}; +#undef HTTP_ERRNO_GEN + + +/* Get an http_errno value from an http_parser */ +#define HTTP_PARSER_ERRNO(p) ((enum http_errno) (p)->http_errno) + + +struct http_parser { + /** PRIVATE **/ + unsigned int type : 2; /* enum http_parser_type */ + unsigned int flags : 7; /* F_* values from 'flags' enum; semi-public */ + unsigned int state : 7; /* enum state from http_parser.c */ + unsigned int header_state : 8; /* enum header_state from http_parser.c */ + unsigned int index : 8; /* index into current matcher */ + + uint32_t nread; /* # bytes read in various scenarios */ + uint64_t content_length; /* # bytes in body (0 if no Content-Length header) */ + + /** READ-ONLY **/ + unsigned short http_major; + unsigned short http_minor; + unsigned int status_code : 16; /* responses only */ + unsigned int method : 8; /* requests only */ + unsigned int http_errno : 7; + + /* 1 = Upgrade header was present and the parser has exited because of that. + * 0 = No upgrade header present. + * Should be checked when http_parser_execute() returns in addition to + * error checking. + */ + unsigned int upgrade : 1; + + /** PUBLIC **/ + void *data; /* A pointer to get hook to the "connection" or "socket" object */ +}; + + +struct http_parser_settings { + http_cb on_message_begin; + http_data_cb on_url; + http_data_cb on_status; + http_data_cb on_header_field; + http_data_cb on_header_value; + http_cb on_headers_complete; + http_data_cb on_body; + http_cb on_message_complete; + /* When on_chunk_header is called, the current chunk length is stored + * in parser->content_length. 
+ */ + http_cb on_chunk_header; + http_cb on_chunk_complete; +}; + + +enum http_parser_url_fields + { UF_SCHEMA = 0 + , UF_HOST = 1 + , UF_PORT = 2 + , UF_PATH = 3 + , UF_QUERY = 4 + , UF_FRAGMENT = 5 + , UF_USERINFO = 6 + , UF_MAX = 7 + }; + + +/* Result structure for http_parser_parse_url(). + * + * Callers should index into field_data[] with UF_* values iff field_set + * has the relevant (1 << UF_*) bit set. As a courtesy to clients (and + * because we probably have padding left over), we convert any port to + * a uint16_t. + */ +struct http_parser_url { + uint16_t field_set; /* Bitmask of (1 << UF_*) values */ + uint16_t port; /* Converted UF_PORT string */ + + struct { + uint16_t off; /* Offset into buffer in which field starts */ + uint16_t len; /* Length of run in buffer */ + } field_data[UF_MAX]; +}; + + +/* Returns the library version. Bits 16-23 contain the major version number, + * bits 8-15 the minor version number and bits 0-7 the patch level. + * Usage example: + * + * unsigned long version = http_parser_version(); + * unsigned major = (version >> 16) & 255; + * unsigned minor = (version >> 8) & 255; + * unsigned patch = version & 255; + * printf("http_parser v%u.%u.%u\n", major, minor, patch); + */ +unsigned long http_parser_version(void); + +void http_parser_init(http_parser *parser, enum http_parser_type type); + + +/* Initialize http_parser_settings members to 0 + */ +void http_parser_settings_init(http_parser_settings *settings); + + +/* Executes the parser. Returns number of parsed bytes. Sets + * `parser->http_errno` on error. */ +size_t http_parser_execute(http_parser *parser, + const http_parser_settings *settings, + const char *data, + size_t len); + + +/* If http_should_keep_alive() in the on_headers_complete or + * on_message_complete callback returns 0, then this should be + * the last message on the connection. + * If you are the server, respond with the "Connection: close" header. + * If you are the client, close the connection. 
+ */ +int http_should_keep_alive(const http_parser *parser); + +/* Returns a string version of the HTTP method. */ +const char *http_method_str(enum http_method m); + +/* Return a string name of the given error */ +const char *http_errno_name(enum http_errno err); + +/* Return a string description of the given error */ +const char *http_errno_description(enum http_errno err); + +/* Parse a URL; return nonzero on failure */ +int http_parser_parse_url(const char *buf, size_t buflen, + int is_connect, + struct http_parser_url *u); + +/* Pause or un-pause the parser; a nonzero value pauses */ +void http_parser_pause(http_parser *parser, int paused); + +/* Checks if this is the final chunk of the body. */ +int http_body_is_final(const http_parser *parser); + +#ifdef __cplusplus +} +#endif +#endif diff --git a/vendor/http_parser/test.c b/vendor/http_parser/test.c new file mode 100644 index 000000000..4c00571eb --- /dev/null +++ b/vendor/http_parser/test.c @@ -0,0 +1,3852 @@ +/* Copyright Joyent, Inc. and other Node contributors. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ +#include "http_parser.h" +#include +#include +#include +#include /* rand */ +#include +#include + +#if defined(__APPLE__) +# undef strlcat +# undef strlncpy +# undef strlcpy +#endif /* defined(__APPLE__) */ + +#undef TRUE +#define TRUE 1 +#undef FALSE +#define FALSE 0 + +#define MAX_HEADERS 13 +#define MAX_ELEMENT_SIZE 2048 +#define MAX_CHUNKS 16 + +#define MIN(a,b) ((a) < (b) ? (a) : (b)) + +static http_parser *parser; + +struct message { + const char *name; // for debugging purposes + const char *raw; + enum http_parser_type type; + enum http_method method; + int status_code; + char response_status[MAX_ELEMENT_SIZE]; + char request_path[MAX_ELEMENT_SIZE]; + char request_url[MAX_ELEMENT_SIZE]; + char fragment[MAX_ELEMENT_SIZE]; + char query_string[MAX_ELEMENT_SIZE]; + char body[MAX_ELEMENT_SIZE]; + size_t body_size; + const char *host; + const char *userinfo; + uint16_t port; + int num_headers; + enum { NONE=0, FIELD, VALUE } last_header_element; + char headers [MAX_HEADERS][2][MAX_ELEMENT_SIZE]; + int should_keep_alive; + + int num_chunks; + int num_chunks_complete; + int chunk_lengths[MAX_CHUNKS]; + + const char *upgrade; // upgraded body + + unsigned short http_major; + unsigned short http_minor; + + int message_begin_cb_called; + int headers_complete_cb_called; + int message_complete_cb_called; + int message_complete_on_eof; + int body_is_final; +}; + +static int currently_parsing_eof; + +static struct message messages[5]; +static int num_messages; +static http_parser_settings *current_pause_parser; + +/* * R E Q U E S T S * */ +const struct message requests[] = +#define CURL_GET 0 +{ {.name= "curl get" + ,.type= HTTP_REQUEST + ,.raw= "GET /test HTTP/1.1\r\n" + "User-Agent: 
curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1\r\n" + "Host: 0.0.0.0=5000\r\n" + "Accept: */*\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/test" + ,.request_url= "/test" + ,.num_headers= 3 + ,.headers= + { { "User-Agent", "curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1" } + , { "Host", "0.0.0.0=5000" } + , { "Accept", "*/*" } + } + ,.body= "" + } + +#define FIREFOX_GET 1 +, {.name= "firefox get" + ,.type= HTTP_REQUEST + ,.raw= "GET /favicon.ico HTTP/1.1\r\n" + "Host: 0.0.0.0=5000\r\n" + "User-Agent: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko/2008061015 Firefox/3.0\r\n" + "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n" + "Accept-Language: en-us,en;q=0.5\r\n" + "Accept-Encoding: gzip,deflate\r\n" + "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7\r\n" + "Keep-Alive: 300\r\n" + "Connection: keep-alive\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/favicon.ico" + ,.request_url= "/favicon.ico" + ,.num_headers= 8 + ,.headers= + { { "Host", "0.0.0.0=5000" } + , { "User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko/2008061015 Firefox/3.0" } + , { "Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" } + , { "Accept-Language", "en-us,en;q=0.5" } + , { "Accept-Encoding", "gzip,deflate" } + , { "Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7" } + , { "Keep-Alive", "300" } + , { "Connection", "keep-alive" } + } + ,.body= "" + } + +#define DUMBFUCK 2 +, {.name= "dumbfuck" + ,.type= HTTP_REQUEST + ,.raw= "GET /dumbfuck HTTP/1.1\r\n" + "aaaaaaaaaaaaa:++++++++++\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE 
+ ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/dumbfuck" + ,.request_url= "/dumbfuck" + ,.num_headers= 1 + ,.headers= + { { "aaaaaaaaaaaaa", "++++++++++" } + } + ,.body= "" + } + +#define FRAGMENT_IN_URI 3 +, {.name= "fragment in url" + ,.type= HTTP_REQUEST + ,.raw= "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "page=1" + ,.fragment= "posts-17408" + ,.request_path= "/forums/1/topics/2375" + /* XXX request url does include fragment? */ + ,.request_url= "/forums/1/topics/2375?page=1#posts-17408" + ,.num_headers= 0 + ,.body= "" + } + +#define GET_NO_HEADERS_NO_BODY 4 +, {.name= "get no headers no body" + ,.type= HTTP_REQUEST + ,.raw= "GET /get_no_headers_no_body/world HTTP/1.1\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE /* would need Connection: close */ + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/get_no_headers_no_body/world" + ,.request_url= "/get_no_headers_no_body/world" + ,.num_headers= 0 + ,.body= "" + } + +#define GET_ONE_HEADER_NO_BODY 5 +, {.name= "get one header no body" + ,.type= HTTP_REQUEST + ,.raw= "GET /get_one_header_no_body HTTP/1.1\r\n" + "Accept: */*\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE /* would need Connection: close */ + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/get_one_header_no_body" + ,.request_url= "/get_one_header_no_body" + ,.num_headers= 1 + ,.headers= + { { "Accept" , "*/*" } + } + ,.body= "" + } + +#define GET_FUNKY_CONTENT_LENGTH 6 +, {.name= "get funky content length body hello" + ,.type= HTTP_REQUEST + ,.raw= "GET /get_funky_content_length_body_hello HTTP/1.0\r\n" + "conTENT-Length: 5\r\n" + "\r\n" + 
"HELLO" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 0 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/get_funky_content_length_body_hello" + ,.request_url= "/get_funky_content_length_body_hello" + ,.num_headers= 1 + ,.headers= + { { "conTENT-Length" , "5" } + } + ,.body= "HELLO" + } + +#define POST_IDENTITY_BODY_WORLD 7 +, {.name= "post identity body world" + ,.type= HTTP_REQUEST + ,.raw= "POST /post_identity_body_world?q=search#hey HTTP/1.1\r\n" + "Accept: */*\r\n" + "Transfer-Encoding: identity\r\n" + "Content-Length: 5\r\n" + "\r\n" + "World" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_POST + ,.query_string= "q=search" + ,.fragment= "hey" + ,.request_path= "/post_identity_body_world" + ,.request_url= "/post_identity_body_world?q=search#hey" + ,.num_headers= 3 + ,.headers= + { { "Accept", "*/*" } + , { "Transfer-Encoding", "identity" } + , { "Content-Length", "5" } + } + ,.body= "World" + } + +#define POST_CHUNKED_ALL_YOUR_BASE 8 +, {.name= "post - chunked body: all your base are belong to us" + ,.type= HTTP_REQUEST + ,.raw= "POST /post_chunked_all_your_base HTTP/1.1\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n" + "1e\r\nall your base are belong to us\r\n" + "0\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_POST + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/post_chunked_all_your_base" + ,.request_url= "/post_chunked_all_your_base" + ,.num_headers= 1 + ,.headers= + { { "Transfer-Encoding" , "chunked" } + } + ,.body= "all your base are belong to us" + ,.num_chunks_complete= 2 + ,.chunk_lengths= { 0x1e } + } + +#define TWO_CHUNKS_MULT_ZERO_END 9 +, {.name= "two chunks ; triple zero ending" + ,.type= HTTP_REQUEST + ,.raw= "POST /two_chunks_mult_zero_end HTTP/1.1\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n" + 
"5\r\nhello\r\n" + "6\r\n world\r\n" + "000\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_POST + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/two_chunks_mult_zero_end" + ,.request_url= "/two_chunks_mult_zero_end" + ,.num_headers= 1 + ,.headers= + { { "Transfer-Encoding", "chunked" } + } + ,.body= "hello world" + ,.num_chunks_complete= 3 + ,.chunk_lengths= { 5, 6 } + } + +#define CHUNKED_W_TRAILING_HEADERS 10 +, {.name= "chunked with trailing headers. blech." + ,.type= HTTP_REQUEST + ,.raw= "POST /chunked_w_trailing_headers HTTP/1.1\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n" + "5\r\nhello\r\n" + "6\r\n world\r\n" + "0\r\n" + "Vary: *\r\n" + "Content-Type: text/plain\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_POST + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/chunked_w_trailing_headers" + ,.request_url= "/chunked_w_trailing_headers" + ,.num_headers= 3 + ,.headers= + { { "Transfer-Encoding", "chunked" } + , { "Vary", "*" } + , { "Content-Type", "text/plain" } + } + ,.body= "hello world" + ,.num_chunks_complete= 3 + ,.chunk_lengths= { 5, 6 } + } + +#define CHUNKED_W_BULLSHIT_AFTER_LENGTH 11 +, {.name= "with bullshit after the length" + ,.type= HTTP_REQUEST + ,.raw= "POST /chunked_w_bullshit_after_length HTTP/1.1\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n" + "5; ihatew3;whatthefuck=aretheseparametersfor\r\nhello\r\n" + "6; blahblah; blah\r\n world\r\n" + "0\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_POST + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/chunked_w_bullshit_after_length" + ,.request_url= "/chunked_w_bullshit_after_length" + ,.num_headers= 1 + ,.headers= + { { "Transfer-Encoding", "chunked" } + } + ,.body= "hello world" + ,.num_chunks_complete= 3 + 
,.chunk_lengths= { 5, 6 } + } + +#define WITH_QUOTES 12 +, {.name= "with quotes" + ,.type= HTTP_REQUEST + ,.raw= "GET /with_\"stupid\"_quotes?foo=\"bar\" HTTP/1.1\r\n\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "foo=\"bar\"" + ,.fragment= "" + ,.request_path= "/with_\"stupid\"_quotes" + ,.request_url= "/with_\"stupid\"_quotes?foo=\"bar\"" + ,.num_headers= 0 + ,.headers= { } + ,.body= "" + } + +#define APACHEBENCH_GET 13 +/* The server receiving this request SHOULD NOT wait for EOF + * to know that content-length == 0. + * How to represent this in a unit test? message_complete_on_eof + * Compare with NO_CONTENT_LENGTH_RESPONSE. + */ +, {.name = "apachebench get" + ,.type= HTTP_REQUEST + ,.raw= "GET /test HTTP/1.0\r\n" + "Host: 0.0.0.0:5000\r\n" + "User-Agent: ApacheBench/2.3\r\n" + "Accept: */*\r\n\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 0 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/test" + ,.request_url= "/test" + ,.num_headers= 3 + ,.headers= { { "Host", "0.0.0.0:5000" } + , { "User-Agent", "ApacheBench/2.3" } + , { "Accept", "*/*" } + } + ,.body= "" + } + +#define QUERY_URL_WITH_QUESTION_MARK_GET 14 +/* Some clients include '?' characters in query strings. 
+ */ +, {.name = "query url with question mark" + ,.type= HTTP_REQUEST + ,.raw= "GET /test.cgi?foo=bar?baz HTTP/1.1\r\n\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "foo=bar?baz" + ,.fragment= "" + ,.request_path= "/test.cgi" + ,.request_url= "/test.cgi?foo=bar?baz" + ,.num_headers= 0 + ,.headers= {} + ,.body= "" + } + +#define PREFIX_NEWLINE_GET 15 +/* Some clients, especially after a POST in a keep-alive connection, + * will send an extra CRLF before the next request + */ +, {.name = "newline prefix get" + ,.type= HTTP_REQUEST + ,.raw= "\r\nGET /test HTTP/1.1\r\n\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/test" + ,.request_url= "/test" + ,.num_headers= 0 + ,.headers= { } + ,.body= "" + } + +#define UPGRADE_REQUEST 16 +, {.name = "upgrade request" + ,.type= HTTP_REQUEST + ,.raw= "GET /demo HTTP/1.1\r\n" + "Host: example.com\r\n" + "Connection: Upgrade\r\n" + "Sec-WebSocket-Key2: 12998 5 Y3 1 .P00\r\n" + "Sec-WebSocket-Protocol: sample\r\n" + "Upgrade: WebSocket\r\n" + "Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5\r\n" + "Origin: http://example.com\r\n" + "\r\n" + "Hot diggity dogg" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/demo" + ,.request_url= "/demo" + ,.num_headers= 7 + ,.upgrade="Hot diggity dogg" + ,.headers= { { "Host", "example.com" } + , { "Connection", "Upgrade" } + , { "Sec-WebSocket-Key2", "12998 5 Y3 1 .P00" } + , { "Sec-WebSocket-Protocol", "sample" } + , { "Upgrade", "WebSocket" } + , { "Sec-WebSocket-Key1", "4 @1 46546xW%0l 1 5" } + , { "Origin", "http://example.com" } + } + ,.body= "" + } + +#define CONNECT_REQUEST 17 +, {.name = "connect request" + ,.type= HTTP_REQUEST + ,.raw= 
"CONNECT 0-home0.netscape.com:443 HTTP/1.0\r\n" + "User-agent: Mozilla/1.1N\r\n" + "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" + "\r\n" + "some data\r\n" + "and yet even more data" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 0 + ,.method= HTTP_CONNECT + ,.query_string= "" + ,.fragment= "" + ,.request_path= "" + ,.request_url= "0-home0.netscape.com:443" + ,.num_headers= 2 + ,.upgrade="some data\r\nand yet even more data" + ,.headers= { { "User-agent", "Mozilla/1.1N" } + , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } + } + ,.body= "" + } + +#define REPORT_REQ 18 +, {.name= "report request" + ,.type= HTTP_REQUEST + ,.raw= "REPORT /test HTTP/1.1\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_REPORT + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/test" + ,.request_url= "/test" + ,.num_headers= 0 + ,.headers= {} + ,.body= "" + } + +#define NO_HTTP_VERSION 19 +, {.name= "request with no http version" + ,.type= HTTP_REQUEST + ,.raw= "GET /\r\n" + "\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 0 + ,.http_minor= 9 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/" + ,.request_url= "/" + ,.num_headers= 0 + ,.headers= {} + ,.body= "" + } + +#define MSEARCH_REQ 20 +, {.name= "m-search request" + ,.type= HTTP_REQUEST + ,.raw= "M-SEARCH * HTTP/1.1\r\n" + "HOST: 239.255.255.250:1900\r\n" + "MAN: \"ssdp:discover\"\r\n" + "ST: \"ssdp:all\"\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_MSEARCH + ,.query_string= "" + ,.fragment= "" + ,.request_path= "*" + ,.request_url= "*" + ,.num_headers= 3 + ,.headers= { { "HOST", "239.255.255.250:1900" } + , { "MAN", "\"ssdp:discover\"" } + , { "ST", "\"ssdp:all\"" } + } + ,.body= "" + } + +#define LINE_FOLDING_IN_HEADER 21 +, {.name= 
"line folding in header value" + ,.type= HTTP_REQUEST + ,.raw= "GET / HTTP/1.1\r\n" + "Line1: abc\r\n" + "\tdef\r\n" + " ghi\r\n" + "\t\tjkl\r\n" + " mno \r\n" + "\t \tqrs\r\n" + "Line2: \t line2\t\r\n" + "Line3:\r\n" + " line3\r\n" + "Line4: \r\n" + " \r\n" + "Connection:\r\n" + " close\r\n" + "\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/" + ,.request_url= "/" + ,.num_headers= 5 + ,.headers= { { "Line1", "abc\tdef ghi\t\tjkl mno \t \tqrs" } + , { "Line2", "line2\t" } + , { "Line3", "line3" } + , { "Line4", "" } + , { "Connection", "close" }, + } + ,.body= "" + } + + +#define QUERY_TERMINATED_HOST 22 +, {.name= "host terminated by a query string" + ,.type= HTTP_REQUEST + ,.raw= "GET http://hypnotoad.org?hail=all HTTP/1.1\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "hail=all" + ,.fragment= "" + ,.request_path= "" + ,.request_url= "http://hypnotoad.org?hail=all" + ,.host= "hypnotoad.org" + ,.num_headers= 0 + ,.headers= { } + ,.body= "" + } + +#define QUERY_TERMINATED_HOSTPORT 23 +, {.name= "host:port terminated by a query string" + ,.type= HTTP_REQUEST + ,.raw= "GET http://hypnotoad.org:1234?hail=all HTTP/1.1\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "hail=all" + ,.fragment= "" + ,.request_path= "" + ,.request_url= "http://hypnotoad.org:1234?hail=all" + ,.host= "hypnotoad.org" + ,.port= 1234 + ,.num_headers= 0 + ,.headers= { } + ,.body= "" + } + +#define SPACE_TERMINATED_HOSTPORT 24 +, {.name= "host:port terminated by a space" + ,.type= HTTP_REQUEST + ,.raw= "GET http://hypnotoad.org:1234 HTTP/1.1\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + 
,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "" + ,.request_url= "http://hypnotoad.org:1234" + ,.host= "hypnotoad.org" + ,.port= 1234 + ,.num_headers= 0 + ,.headers= { } + ,.body= "" + } + +#define PATCH_REQ 25 +, {.name = "PATCH request" + ,.type= HTTP_REQUEST + ,.raw= "PATCH /file.txt HTTP/1.1\r\n" + "Host: www.example.com\r\n" + "Content-Type: application/example\r\n" + "If-Match: \"e0023aa4e\"\r\n" + "Content-Length: 10\r\n" + "\r\n" + "cccccccccc" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_PATCH + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/file.txt" + ,.request_url= "/file.txt" + ,.num_headers= 4 + ,.headers= { { "Host", "www.example.com" } + , { "Content-Type", "application/example" } + , { "If-Match", "\"e0023aa4e\"" } + , { "Content-Length", "10" } + } + ,.body= "cccccccccc" + } + +#define CONNECT_CAPS_REQUEST 26 +, {.name = "connect caps request" + ,.type= HTTP_REQUEST + ,.raw= "CONNECT HOME0.NETSCAPE.COM:443 HTTP/1.0\r\n" + "User-agent: Mozilla/1.1N\r\n" + "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" + "\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 0 + ,.method= HTTP_CONNECT + ,.query_string= "" + ,.fragment= "" + ,.request_path= "" + ,.request_url= "HOME0.NETSCAPE.COM:443" + ,.num_headers= 2 + ,.upgrade="" + ,.headers= { { "User-agent", "Mozilla/1.1N" } + , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } + } + ,.body= "" + } + +#if !HTTP_PARSER_STRICT +#define UTF8_PATH_REQ 27 +, {.name= "utf-8 path request" + ,.type= HTTP_REQUEST + ,.raw= "GET /δ¶/δt/pope?q=1#narf HTTP/1.1\r\n" + "Host: github.com\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "q=1" + ,.fragment= "narf" + ,.request_path= "/δ¶/δt/pope" + ,.request_url= "/δ¶/δt/pope?q=1#narf" + ,.num_headers= 1 + 
,.headers= { {"Host", "github.com" } + } + ,.body= "" + } + +#define HOSTNAME_UNDERSCORE 28 +, {.name = "hostname underscore" + ,.type= HTTP_REQUEST + ,.raw= "CONNECT home_0.netscape.com:443 HTTP/1.0\r\n" + "User-agent: Mozilla/1.1N\r\n" + "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" + "\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 0 + ,.method= HTTP_CONNECT + ,.query_string= "" + ,.fragment= "" + ,.request_path= "" + ,.request_url= "home_0.netscape.com:443" + ,.num_headers= 2 + ,.upgrade="" + ,.headers= { { "User-agent", "Mozilla/1.1N" } + , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } + } + ,.body= "" + } +#endif /* !HTTP_PARSER_STRICT */ + +/* see https://github.com/ry/http-parser/issues/47 */ +#define EAT_TRAILING_CRLF_NO_CONNECTION_CLOSE 29 +, {.name = "eat CRLF between requests, no \"Connection: close\" header" + ,.raw= "POST / HTTP/1.1\r\n" + "Host: www.example.com\r\n" + "Content-Type: application/x-www-form-urlencoded\r\n" + "Content-Length: 4\r\n" + "\r\n" + "q=42\r\n" /* note the trailing CRLF */ + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_POST + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/" + ,.request_url= "/" + ,.num_headers= 3 + ,.upgrade= 0 + ,.headers= { { "Host", "www.example.com" } + , { "Content-Type", "application/x-www-form-urlencoded" } + , { "Content-Length", "4" } + } + ,.body= "q=42" + } + +/* see https://github.com/ry/http-parser/issues/47 */ +#define EAT_TRAILING_CRLF_WITH_CONNECTION_CLOSE 30 +, {.name = "eat CRLF between requests even if \"Connection: close\" is set" + ,.raw= "POST / HTTP/1.1\r\n" + "Host: www.example.com\r\n" + "Content-Type: application/x-www-form-urlencoded\r\n" + "Content-Length: 4\r\n" + "Connection: close\r\n" + "\r\n" + "q=42\r\n" /* note the trailing CRLF */ + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE /* input buffer isn't empty when 
on_message_complete is called */ + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_POST + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/" + ,.request_url= "/" + ,.num_headers= 4 + ,.upgrade= 0 + ,.headers= { { "Host", "www.example.com" } + , { "Content-Type", "application/x-www-form-urlencoded" } + , { "Content-Length", "4" } + , { "Connection", "close" } + } + ,.body= "q=42" + } + +#define PURGE_REQ 31 +, {.name = "PURGE request" + ,.type= HTTP_REQUEST + ,.raw= "PURGE /file.txt HTTP/1.1\r\n" + "Host: www.example.com\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_PURGE + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/file.txt" + ,.request_url= "/file.txt" + ,.num_headers= 1 + ,.headers= { { "Host", "www.example.com" } } + ,.body= "" + } + +#define SEARCH_REQ 32 +, {.name = "SEARCH request" + ,.type= HTTP_REQUEST + ,.raw= "SEARCH / HTTP/1.1\r\n" + "Host: www.example.com\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_SEARCH + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/" + ,.request_url= "/" + ,.num_headers= 1 + ,.headers= { { "Host", "www.example.com" } } + ,.body= "" + } + +#define PROXY_WITH_BASIC_AUTH 33 +, {.name= "host:port and basic_auth" + ,.type= HTTP_REQUEST + ,.raw= "GET http://a%12:b!&*$@hypnotoad.org:1234/toto HTTP/1.1\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.fragment= "" + ,.request_path= "/toto" + ,.request_url= "http://a%12:b!&*$@hypnotoad.org:1234/toto" + ,.host= "hypnotoad.org" + ,.userinfo= "a%12:b!&*$" + ,.port= 1234 + ,.num_headers= 0 + ,.headers= { } + ,.body= "" + } + +#define LINE_FOLDING_IN_HEADER_WITH_LF 34 +, {.name= "line folding in header value" + ,.type= HTTP_REQUEST + ,.raw= "GET / HTTP/1.1\n" + "Line1: abc\n" + "\tdef\n" + " ghi\n" + 
"\t\tjkl\n" + " mno \n" + "\t \tqrs\n" + "Line2: \t line2\t\n" + "Line3:\n" + " line3\n" + "Line4: \n" + " \n" + "Connection:\n" + " close\n" + "\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/" + ,.request_url= "/" + ,.num_headers= 5 + ,.headers= { { "Line1", "abc\tdef ghi\t\tjkl mno \t \tqrs" } + , { "Line2", "line2\t" } + , { "Line3", "line3" } + , { "Line4", "" } + , { "Connection", "close" }, + } + ,.body= "" + } + +#define CONNECTION_MULTI 35 +, {.name = "multiple connection header values with folding" + ,.type= HTTP_REQUEST + ,.raw= "GET /demo HTTP/1.1\r\n" + "Host: example.com\r\n" + "Connection: Something,\r\n" + " Upgrade, ,Keep-Alive\r\n" + "Sec-WebSocket-Key2: 12998 5 Y3 1 .P00\r\n" + "Sec-WebSocket-Protocol: sample\r\n" + "Upgrade: WebSocket\r\n" + "Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5\r\n" + "Origin: http://example.com\r\n" + "\r\n" + "Hot diggity dogg" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/demo" + ,.request_url= "/demo" + ,.num_headers= 7 + ,.upgrade="Hot diggity dogg" + ,.headers= { { "Host", "example.com" } + , { "Connection", "Something, Upgrade, ,Keep-Alive" } + , { "Sec-WebSocket-Key2", "12998 5 Y3 1 .P00" } + , { "Sec-WebSocket-Protocol", "sample" } + , { "Upgrade", "WebSocket" } + , { "Sec-WebSocket-Key1", "4 @1 46546xW%0l 1 5" } + , { "Origin", "http://example.com" } + } + ,.body= "" + } + +#define CONNECTION_MULTI_LWS 36 +, {.name = "multiple connection header values with folding and lws" + ,.type= HTTP_REQUEST + ,.raw= "GET /demo HTTP/1.1\r\n" + "Connection: keep-alive, upgrade\r\n" + "Upgrade: WebSocket\r\n" + "\r\n" + "Hot diggity dogg" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + 
,.query_string= "" + ,.fragment= "" + ,.request_path= "/demo" + ,.request_url= "/demo" + ,.num_headers= 2 + ,.upgrade="Hot diggity dogg" + ,.headers= { { "Connection", "keep-alive, upgrade" } + , { "Upgrade", "WebSocket" } + } + ,.body= "" + } + +#define CONNECTION_MULTI_LWS_CRLF 37 +, {.name = "multiple connection header values with folding and lws" + ,.type= HTTP_REQUEST + ,.raw= "GET /demo HTTP/1.1\r\n" + "Connection: keep-alive, \r\n upgrade\r\n" + "Upgrade: WebSocket\r\n" + "\r\n" + "Hot diggity dogg" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_GET + ,.query_string= "" + ,.fragment= "" + ,.request_path= "/demo" + ,.request_url= "/demo" + ,.num_headers= 2 + ,.upgrade="Hot diggity dogg" + ,.headers= { { "Connection", "keep-alive, upgrade" } + , { "Upgrade", "WebSocket" } + } + ,.body= "" + } + +#define UPGRADE_POST_REQUEST 38 +, {.name = "upgrade post request" + ,.type= HTTP_REQUEST + ,.raw= "POST /demo HTTP/1.1\r\n" + "Host: example.com\r\n" + "Connection: Upgrade\r\n" + "Upgrade: HTTP/2.0\r\n" + "Content-Length: 15\r\n" + "\r\n" + "sweet post body" + "Hot diggity dogg" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.method= HTTP_POST + ,.request_path= "/demo" + ,.request_url= "/demo" + ,.num_headers= 4 + ,.upgrade="Hot diggity dogg" + ,.headers= { { "Host", "example.com" } + , { "Connection", "Upgrade" } + , { "Upgrade", "HTTP/2.0" } + , { "Content-Length", "15" } + } + ,.body= "sweet post body" + } + +#define CONNECT_WITH_BODY_REQUEST 39 +, {.name = "connect with body request" + ,.type= HTTP_REQUEST + ,.raw= "CONNECT foo.bar.com:443 HTTP/1.0\r\n" + "User-agent: Mozilla/1.1N\r\n" + "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" + "Content-Length: 10\r\n" + "\r\n" + "blarfcicle" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 0 + ,.method= HTTP_CONNECT + ,.request_url= 
"foo.bar.com:443" + ,.num_headers= 3 + ,.upgrade="blarfcicle" + ,.headers= { { "User-agent", "Mozilla/1.1N" } + , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } + , { "Content-Length", "10" } + } + ,.body= "" + } + +, {.name= NULL } /* sentinel */ +}; + +/* * R E S P O N S E S * */ +const struct message responses[] = +#define GOOGLE_301 0 +{ {.name= "google 301" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 301 Moved Permanently\r\n" + "Location: http://www.google.com/\r\n" + "Content-Type: text/html; charset=UTF-8\r\n" + "Date: Sun, 26 Apr 2009 11:11:49 GMT\r\n" + "Expires: Tue, 26 May 2009 11:11:49 GMT\r\n" + "X-$PrototypeBI-Version: 1.6.0.3\r\n" /* $ char in header field */ + "Cache-Control: public, max-age=2592000\r\n" + "Server: gws\r\n" + "Content-Length: 219 \r\n" + "\r\n" + "\n" + "301 Moved\n" + "

301 Moved

\n" + "The document has moved\n" + "here.\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 301 + ,.response_status= "Moved Permanently" + ,.num_headers= 8 + ,.headers= + { { "Location", "http://www.google.com/" } + , { "Content-Type", "text/html; charset=UTF-8" } + , { "Date", "Sun, 26 Apr 2009 11:11:49 GMT" } + , { "Expires", "Tue, 26 May 2009 11:11:49 GMT" } + , { "X-$PrototypeBI-Version", "1.6.0.3" } + , { "Cache-Control", "public, max-age=2592000" } + , { "Server", "gws" } + , { "Content-Length", "219 " } + } + ,.body= "\n" + "301 Moved\n" + "

301 Moved

\n" + "The document has moved\n" + "here.\r\n" + "\r\n" + } + +#define NO_CONTENT_LENGTH_RESPONSE 1 +/* The client should wait for the server's EOF. That is, when content-length + * is not specified, and "Connection: close", the end of body is specified + * by the EOF. + * Compare with APACHEBENCH_GET + */ +, {.name= "no content-length response" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 200 OK\r\n" + "Date: Tue, 04 Aug 2009 07:59:32 GMT\r\n" + "Server: Apache\r\n" + "X-Powered-By: Servlet/2.5 JSP/2.1\r\n" + "Content-Type: text/xml; charset=utf-8\r\n" + "Connection: close\r\n" + "\r\n" + "\n" + "\n" + " \n" + " \n" + " SOAP-ENV:Client\n" + " Client Error\n" + " \n" + " \n" + "" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= TRUE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 5 + ,.headers= + { { "Date", "Tue, 04 Aug 2009 07:59:32 GMT" } + , { "Server", "Apache" } + , { "X-Powered-By", "Servlet/2.5 JSP/2.1" } + , { "Content-Type", "text/xml; charset=utf-8" } + , { "Connection", "close" } + } + ,.body= "\n" + "\n" + " \n" + " \n" + " SOAP-ENV:Client\n" + " Client Error\n" + " \n" + " \n" + "" + } + +#define NO_HEADERS_NO_BODY_404 2 +, {.name= "404 no headers no body" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 404 Not Found\r\n\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= TRUE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 404 + ,.response_status= "Not Found" + ,.num_headers= 0 + ,.headers= {} + ,.body_size= 0 + ,.body= "" + } + +#define NO_REASON_PHRASE 3 +, {.name= "301 no response phrase" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 301\r\n\r\n" + ,.should_keep_alive = FALSE + ,.message_complete_on_eof= TRUE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 301 + ,.response_status= "" + ,.num_headers= 0 + ,.headers= {} + ,.body= "" + } + +#define TRAILING_SPACE_ON_CHUNKED_BODY 4 +, {.name="200 trailing space on chunked body" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 200 
OK\r\n" + "Content-Type: text/plain\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n" + "25 \r\n" + "This is the data in the first chunk\r\n" + "\r\n" + "1C\r\n" + "and this is the second one\r\n" + "\r\n" + "0 \r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 2 + ,.headers= + { {"Content-Type", "text/plain" } + , {"Transfer-Encoding", "chunked" } + } + ,.body_size = 37+28 + ,.body = + "This is the data in the first chunk\r\n" + "and this is the second one\r\n" + ,.num_chunks_complete= 3 + ,.chunk_lengths= { 0x25, 0x1c } + } + +#define NO_CARRIAGE_RET 5 +, {.name="no carriage ret" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 200 OK\n" + "Content-Type: text/html; charset=utf-8\n" + "Connection: close\n" + "\n" + "these headers are from http://news.ycombinator.com/" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= TRUE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 2 + ,.headers= + { {"Content-Type", "text/html; charset=utf-8" } + , {"Connection", "close" } + } + ,.body= "these headers are from http://news.ycombinator.com/" + } + +#define PROXY_CONNECTION 6 +, {.name="proxy connection" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 200 OK\r\n" + "Content-Type: text/html; charset=UTF-8\r\n" + "Content-Length: 11\r\n" + "Proxy-Connection: close\r\n" + "Date: Thu, 31 Dec 2009 20:55:48 +0000\r\n" + "\r\n" + "hello world" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 4 + ,.headers= + { {"Content-Type", "text/html; charset=UTF-8" } + , {"Content-Length", "11" } + , {"Proxy-Connection", "close" } + , {"Date", "Thu, 31 Dec 2009 20:55:48 +0000"} + } + ,.body= "hello world" + } + +#define UNDERSTORE_HEADER_KEY 7 + // shown by + // curl -o /dev/null -v 
"http://ad.doubleclick.net/pfadx/DARTSHELLCONFIGXML;dcmt=text/xml;" +, {.name="underscore header key" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 200 OK\r\n" + "Server: DCLK-AdSvr\r\n" + "Content-Type: text/xml\r\n" + "Content-Length: 0\r\n" + "DCLK_imp: v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;dcmt=text/xml;;~cs=o\r\n\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 4 + ,.headers= + { {"Server", "DCLK-AdSvr" } + , {"Content-Type", "text/xml" } + , {"Content-Length", "0" } + , {"DCLK_imp", "v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;dcmt=text/xml;;~cs=o" } + } + ,.body= "" + } + +#define BONJOUR_MADAME_FR 8 +/* The client should not merge two headers fields when the first one doesn't + * have a value. + */ +, {.name= "bonjourmadame.fr" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.0 301 Moved Permanently\r\n" + "Date: Thu, 03 Jun 2010 09:56:32 GMT\r\n" + "Server: Apache/2.2.3 (Red Hat)\r\n" + "Cache-Control: public\r\n" + "Pragma: \r\n" + "Location: http://www.bonjourmadame.fr/\r\n" + "Vary: Accept-Encoding\r\n" + "Content-Length: 0\r\n" + "Content-Type: text/html; charset=UTF-8\r\n" + "Connection: keep-alive\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 0 + ,.status_code= 301 + ,.response_status= "Moved Permanently" + ,.num_headers= 9 + ,.headers= + { { "Date", "Thu, 03 Jun 2010 09:56:32 GMT" } + , { "Server", "Apache/2.2.3 (Red Hat)" } + , { "Cache-Control", "public" } + , { "Pragma", "" } + , { "Location", "http://www.bonjourmadame.fr/" } + , { "Vary", "Accept-Encoding" } + , { "Content-Length", "0" } + , { "Content-Type", "text/html; charset=UTF-8" } + , { "Connection", "keep-alive" } + } + ,.body= "" + } + +#define RES_FIELD_UNDERSCORE 9 +/* Should handle spaces in header fields */ +, {.name= "field underscore" + ,.type= HTTP_RESPONSE + ,.raw= 
"HTTP/1.1 200 OK\r\n" + "Date: Tue, 28 Sep 2010 01:14:13 GMT\r\n" + "Server: Apache\r\n" + "Cache-Control: no-cache, must-revalidate\r\n" + "Expires: Mon, 26 Jul 1997 05:00:00 GMT\r\n" + ".et-Cookie: PlaxoCS=1274804622353690521; path=/; domain=.plaxo.com\r\n" + "Vary: Accept-Encoding\r\n" + "_eep-Alive: timeout=45\r\n" /* semantic value ignored */ + "_onnection: Keep-Alive\r\n" /* semantic value ignored */ + "Transfer-Encoding: chunked\r\n" + "Content-Type: text/html\r\n" + "Connection: close\r\n" + "\r\n" + "0\r\n\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 11 + ,.headers= + { { "Date", "Tue, 28 Sep 2010 01:14:13 GMT" } + , { "Server", "Apache" } + , { "Cache-Control", "no-cache, must-revalidate" } + , { "Expires", "Mon, 26 Jul 1997 05:00:00 GMT" } + , { ".et-Cookie", "PlaxoCS=1274804622353690521; path=/; domain=.plaxo.com" } + , { "Vary", "Accept-Encoding" } + , { "_eep-Alive", "timeout=45" } + , { "_onnection", "Keep-Alive" } + , { "Transfer-Encoding", "chunked" } + , { "Content-Type", "text/html" } + , { "Connection", "close" } + } + ,.body= "" + ,.num_chunks_complete= 1 + ,.chunk_lengths= {} + } + +#define NON_ASCII_IN_STATUS_LINE 10 +/* Should handle non-ASCII in status line */ +, {.name= "non-ASCII in status line" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 500 Oriëntatieprobleem\r\n" + "Date: Fri, 5 Nov 2010 23:07:12 GMT+2\r\n" + "Content-Length: 0\r\n" + "Connection: close\r\n" + "\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 500 + ,.response_status= "Oriëntatieprobleem" + ,.num_headers= 3 + ,.headers= + { { "Date", "Fri, 5 Nov 2010 23:07:12 GMT+2" } + , { "Content-Length", "0" } + , { "Connection", "close" } + } + ,.body= "" + } + +#define HTTP_VERSION_0_9 11 +/* Should handle HTTP/0.9 */ +, {.name= "http version 0.9" + ,.type= HTTP_RESPONSE + ,.raw= 
"HTTP/0.9 200 OK\r\n" + "\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= TRUE + ,.http_major= 0 + ,.http_minor= 9 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 0 + ,.headers= + {} + ,.body= "" + } + +#define NO_CONTENT_LENGTH_NO_TRANSFER_ENCODING_RESPONSE 12 +/* The client should wait for the server's EOF. That is, when neither + * content-length nor transfer-encoding is specified, the end of body + * is specified by the EOF. + */ +, {.name= "neither content-length nor transfer-encoding response" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 200 OK\r\n" + "Content-Type: text/plain\r\n" + "\r\n" + "hello world" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= TRUE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 1 + ,.headers= + { { "Content-Type", "text/plain" } + } + ,.body= "hello world" + } + +#define NO_BODY_HTTP10_KA_200 13 +, {.name= "HTTP/1.0 with keep-alive and EOF-terminated 200 status" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.0 200 OK\r\n" + "Connection: keep-alive\r\n" + "\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= TRUE + ,.http_major= 1 + ,.http_minor= 0 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 1 + ,.headers= + { { "Connection", "keep-alive" } + } + ,.body_size= 0 + ,.body= "" + } + +#define NO_BODY_HTTP10_KA_204 14 +, {.name= "HTTP/1.0 with keep-alive and a 204 status" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.0 204 No content\r\n" + "Connection: keep-alive\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 0 + ,.status_code= 204 + ,.response_status= "No content" + ,.num_headers= 1 + ,.headers= + { { "Connection", "keep-alive" } + } + ,.body_size= 0 + ,.body= "" + } + +#define NO_BODY_HTTP11_KA_200 15 +, {.name= "HTTP/1.1 with an EOF-terminated 200 status" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 200 OK\r\n" + "\r\n" + ,.should_keep_alive= FALSE + 
,.message_complete_on_eof= TRUE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 0 + ,.headers={} + ,.body_size= 0 + ,.body= "" + } + +#define NO_BODY_HTTP11_KA_204 16 +, {.name= "HTTP/1.1 with a 204 status" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 204 No content\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 204 + ,.response_status= "No content" + ,.num_headers= 0 + ,.headers={} + ,.body_size= 0 + ,.body= "" + } + +#define NO_BODY_HTTP11_NOKA_204 17 +, {.name= "HTTP/1.1 with a 204 status and keep-alive disabled" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 204 No content\r\n" + "Connection: close\r\n" + "\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 204 + ,.response_status= "No content" + ,.num_headers= 1 + ,.headers= + { { "Connection", "close" } + } + ,.body_size= 0 + ,.body= "" + } + +#define NO_BODY_HTTP11_KA_CHUNKED_200 18 +, {.name= "HTTP/1.1 with chunked endocing and a 200 response" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 200 OK\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n" + "0\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 1 + ,.headers= + { { "Transfer-Encoding", "chunked" } + } + ,.body_size= 0 + ,.body= "" + ,.num_chunks_complete= 1 + } + +#if !HTTP_PARSER_STRICT +#define SPACE_IN_FIELD_RES 19 +/* Should handle spaces in header fields */ +, {.name= "field space" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 200 OK\r\n" + "Server: Microsoft-IIS/6.0\r\n" + "X-Powered-By: ASP.NET\r\n" + "en-US Content-Type: text/xml\r\n" /* this is the problem */ + "Content-Type: text/xml\r\n" + "Content-Length: 16\r\n" + "Date: Fri, 23 Jul 2010 18:45:38 GMT\r\n" + "Connection: keep-alive\r\n" + "\r\n" + "hello" /* fake body */ + 
,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 7 + ,.headers= + { { "Server", "Microsoft-IIS/6.0" } + , { "X-Powered-By", "ASP.NET" } + , { "en-US Content-Type", "text/xml" } + , { "Content-Type", "text/xml" } + , { "Content-Length", "16" } + , { "Date", "Fri, 23 Jul 2010 18:45:38 GMT" } + , { "Connection", "keep-alive" } + } + ,.body= "hello" + } +#endif /* !HTTP_PARSER_STRICT */ + +#define AMAZON_COM 20 +, {.name= "amazon.com" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 301 MovedPermanently\r\n" + "Date: Wed, 15 May 2013 17:06:33 GMT\r\n" + "Server: Server\r\n" + "x-amz-id-1: 0GPHKXSJQ826RK7GZEB2\r\n" + "p3p: policyref=\"http://www.amazon.com/w3c/p3p.xml\",CP=\"CAO DSP LAW CUR ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC \"\r\n" + "x-amz-id-2: STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNcx4oAD\r\n" + "Location: http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=1263340922&pf_rd_i=507846\r\n" + "Vary: Accept-Encoding,User-Agent\r\n" + "Content-Type: text/html; charset=ISO-8859-1\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n" + "1\r\n" + "\n\r\n" + "0\r\n" + "\r\n" + ,.should_keep_alive= TRUE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 301 + ,.response_status= "MovedPermanently" + ,.num_headers= 9 + ,.headers= { { "Date", "Wed, 15 May 2013 17:06:33 GMT" } + , { "Server", "Server" } + , { "x-amz-id-1", "0GPHKXSJQ826RK7GZEB2" } + , { "p3p", "policyref=\"http://www.amazon.com/w3c/p3p.xml\",CP=\"CAO DSP LAW CUR ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC \"" } + , { "x-amz-id-2", 
"STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNcx4oAD" } + , { "Location", "http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=1263340922&pf_rd_i=507846" } + , { "Vary", "Accept-Encoding,User-Agent" } + , { "Content-Type", "text/html; charset=ISO-8859-1" } + , { "Transfer-Encoding", "chunked" } + } + ,.body= "\n" + ,.num_chunks_complete= 2 + ,.chunk_lengths= { 1 } + } + +#define EMPTY_REASON_PHRASE_AFTER_SPACE 20 +, {.name= "empty reason phrase after space" + ,.type= HTTP_RESPONSE + ,.raw= "HTTP/1.1 200 \r\n" + "\r\n" + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= TRUE + ,.http_major= 1 + ,.http_minor= 1 + ,.status_code= 200 + ,.response_status= "" + ,.num_headers= 0 + ,.headers= {} + ,.body= "" + } + +, {.name= NULL } /* sentinel */ +}; + +/* strnlen() is a POSIX.2008 addition. Can't rely on it being available so + * define it ourselves. + */ +size_t +strnlen(const char *s, size_t maxlen) +{ + const char *p; + + p = memchr(s, '\0', maxlen); + if (p == NULL) + return maxlen; + + return p - s; +} + +size_t +strlncat(char *dst, size_t len, const char *src, size_t n) +{ + size_t slen; + size_t dlen; + size_t rlen; + size_t ncpy; + + slen = strnlen(src, n); + dlen = strnlen(dst, len); + + if (dlen < len) { + rlen = len - dlen; + ncpy = slen < rlen ? slen : (rlen - 1); + memcpy(dst + dlen, src, ncpy); + dst[dlen + ncpy] = '\0'; + } + + assert(len > slen + dlen); + return slen + dlen; +} + +size_t +strlcat(char *dst, const char *src, size_t len) +{ + return strlncat(dst, len, src, (size_t) -1); +} + +size_t +strlncpy(char *dst, size_t len, const char *src, size_t n) +{ + size_t slen; + size_t ncpy; + + slen = strnlen(src, n); + + if (len > 0) { + ncpy = slen < len ? 
slen : (len - 1); + memcpy(dst, src, ncpy); + dst[ncpy] = '\0'; + } + + assert(len > slen); + return slen; +} + +size_t +strlcpy(char *dst, const char *src, size_t len) +{ + return strlncpy(dst, len, src, (size_t) -1); +} + +int +request_url_cb (http_parser *p, const char *buf, size_t len) +{ + assert(p == parser); + strlncat(messages[num_messages].request_url, + sizeof(messages[num_messages].request_url), + buf, + len); + return 0; +} + +int +header_field_cb (http_parser *p, const char *buf, size_t len) +{ + assert(p == parser); + struct message *m = &messages[num_messages]; + + if (m->last_header_element != FIELD) + m->num_headers++; + + strlncat(m->headers[m->num_headers-1][0], + sizeof(m->headers[m->num_headers-1][0]), + buf, + len); + + m->last_header_element = FIELD; + + return 0; +} + +int +header_value_cb (http_parser *p, const char *buf, size_t len) +{ + assert(p == parser); + struct message *m = &messages[num_messages]; + + strlncat(m->headers[m->num_headers-1][1], + sizeof(m->headers[m->num_headers-1][1]), + buf, + len); + + m->last_header_element = VALUE; + + return 0; +} + +void +check_body_is_final (const http_parser *p) +{ + if (messages[num_messages].body_is_final) { + fprintf(stderr, "\n\n *** Error http_body_is_final() should return 1 " + "on last on_body callback call " + "but it doesn't! 
***\n\n"); + assert(0); + abort(); + } + messages[num_messages].body_is_final = http_body_is_final(p); +} + +int +body_cb (http_parser *p, const char *buf, size_t len) +{ + assert(p == parser); + strlncat(messages[num_messages].body, + sizeof(messages[num_messages].body), + buf, + len); + messages[num_messages].body_size += len; + check_body_is_final(p); + // printf("body_cb: '%s'\n", requests[num_messages].body); + return 0; +} + +int +count_body_cb (http_parser *p, const char *buf, size_t len) +{ + assert(p == parser); + assert(buf); + messages[num_messages].body_size += len; + check_body_is_final(p); + return 0; +} + +int +message_begin_cb (http_parser *p) +{ + assert(p == parser); + messages[num_messages].message_begin_cb_called = TRUE; + return 0; +} + +int +headers_complete_cb (http_parser *p) +{ + assert(p == parser); + messages[num_messages].method = parser->method; + messages[num_messages].status_code = parser->status_code; + messages[num_messages].http_major = parser->http_major; + messages[num_messages].http_minor = parser->http_minor; + messages[num_messages].headers_complete_cb_called = TRUE; + messages[num_messages].should_keep_alive = http_should_keep_alive(parser); + return 0; +} + +int +message_complete_cb (http_parser *p) +{ + assert(p == parser); + if (messages[num_messages].should_keep_alive != http_should_keep_alive(parser)) + { + fprintf(stderr, "\n\n *** Error http_should_keep_alive() should have same " + "value in both on_message_complete and on_headers_complete " + "but it doesn't! ***\n\n"); + assert(0); + abort(); + } + + if (messages[num_messages].body_size && + http_body_is_final(p) && + !messages[num_messages].body_is_final) + { + fprintf(stderr, "\n\n *** Error http_body_is_final() should return 1 " + "on last on_body callback call " + "but it doesn't! 
***\n\n"); + assert(0); + abort(); + } + + messages[num_messages].message_complete_cb_called = TRUE; + + messages[num_messages].message_complete_on_eof = currently_parsing_eof; + + num_messages++; + return 0; +} + +int +response_status_cb (http_parser *p, const char *buf, size_t len) +{ + assert(p == parser); + strlncat(messages[num_messages].response_status, + sizeof(messages[num_messages].response_status), + buf, + len); + return 0; +} + +int +chunk_header_cb (http_parser *p) +{ + assert(p == parser); + int chunk_idx = messages[num_messages].num_chunks; + messages[num_messages].num_chunks++; + if (chunk_idx < MAX_CHUNKS) { + messages[num_messages].chunk_lengths[chunk_idx] = p->content_length; + } + + return 0; +} + +int +chunk_complete_cb (http_parser *p) +{ + assert(p == parser); + + /* Here we want to verify that each chunk_header_cb is matched by a + * chunk_complete_cb, so not only should the total number of calls to + * both callbacks be the same, but they also should be interleaved + * properly */ + assert(messages[num_messages].num_chunks == + messages[num_messages].num_chunks_complete + 1); + + messages[num_messages].num_chunks_complete++; + return 0; +} + +/* These dontcall_* callbacks exist so that we can verify that when we're + * paused, no additional callbacks are invoked */ +int +dontcall_message_begin_cb (http_parser *p) +{ + if (p) { } // gcc + fprintf(stderr, "\n\n*** on_message_begin() called on paused parser ***\n\n"); + abort(); +} + +int +dontcall_header_field_cb (http_parser *p, const char *buf, size_t len) +{ + if (p || buf || len) { } // gcc + fprintf(stderr, "\n\n*** on_header_field() called on paused parser ***\n\n"); + abort(); +} + +int +dontcall_header_value_cb (http_parser *p, const char *buf, size_t len) +{ + if (p || buf || len) { } // gcc + fprintf(stderr, "\n\n*** on_header_value() called on paused parser ***\n\n"); + abort(); +} + +int +dontcall_request_url_cb (http_parser *p, const char *buf, size_t len) +{ + if (p || buf || 
len) { } // gcc + fprintf(stderr, "\n\n*** on_request_url() called on paused parser ***\n\n"); + abort(); +} + +int +dontcall_body_cb (http_parser *p, const char *buf, size_t len) +{ + if (p || buf || len) { } // gcc + fprintf(stderr, "\n\n*** on_body_cb() called on paused parser ***\n\n"); + abort(); +} + +int +dontcall_headers_complete_cb (http_parser *p) +{ + if (p) { } // gcc + fprintf(stderr, "\n\n*** on_headers_complete() called on paused " + "parser ***\n\n"); + abort(); +} + +int +dontcall_message_complete_cb (http_parser *p) +{ + if (p) { } // gcc + fprintf(stderr, "\n\n*** on_message_complete() called on paused " + "parser ***\n\n"); + abort(); +} + +int +dontcall_response_status_cb (http_parser *p, const char *buf, size_t len) +{ + if (p || buf || len) { } // gcc + fprintf(stderr, "\n\n*** on_status() called on paused parser ***\n\n"); + abort(); +} + +int +dontcall_chunk_header_cb (http_parser *p) +{ + if (p) { } // gcc + fprintf(stderr, "\n\n*** on_chunk_header() called on paused parser ***\n\n"); + exit(1); +} + +int +dontcall_chunk_complete_cb (http_parser *p) +{ + if (p) { } // gcc + fprintf(stderr, "\n\n*** on_chunk_complete() " + "called on paused parser ***\n\n"); + exit(1); +} + +static http_parser_settings settings_dontcall = + {.on_message_begin = dontcall_message_begin_cb + ,.on_header_field = dontcall_header_field_cb + ,.on_header_value = dontcall_header_value_cb + ,.on_url = dontcall_request_url_cb + ,.on_status = dontcall_response_status_cb + ,.on_body = dontcall_body_cb + ,.on_headers_complete = dontcall_headers_complete_cb + ,.on_message_complete = dontcall_message_complete_cb + ,.on_chunk_header = dontcall_chunk_header_cb + ,.on_chunk_complete = dontcall_chunk_complete_cb + }; + +/* These pause_* callbacks always pause the parser and just invoke the regular + * callback that tracks content. 
Before returning, we overwrite the parser + * settings to point to the _dontcall variety so that we can verify that + * the pause actually did, you know, pause. */ +int +pause_message_begin_cb (http_parser *p) +{ + http_parser_pause(p, 1); + *current_pause_parser = settings_dontcall; + return message_begin_cb(p); +} + +int +pause_header_field_cb (http_parser *p, const char *buf, size_t len) +{ + http_parser_pause(p, 1); + *current_pause_parser = settings_dontcall; + return header_field_cb(p, buf, len); +} + +int +pause_header_value_cb (http_parser *p, const char *buf, size_t len) +{ + http_parser_pause(p, 1); + *current_pause_parser = settings_dontcall; + return header_value_cb(p, buf, len); +} + +int +pause_request_url_cb (http_parser *p, const char *buf, size_t len) +{ + http_parser_pause(p, 1); + *current_pause_parser = settings_dontcall; + return request_url_cb(p, buf, len); +} + +int +pause_body_cb (http_parser *p, const char *buf, size_t len) +{ + http_parser_pause(p, 1); + *current_pause_parser = settings_dontcall; + return body_cb(p, buf, len); +} + +int +pause_headers_complete_cb (http_parser *p) +{ + http_parser_pause(p, 1); + *current_pause_parser = settings_dontcall; + return headers_complete_cb(p); +} + +int +pause_message_complete_cb (http_parser *p) +{ + http_parser_pause(p, 1); + *current_pause_parser = settings_dontcall; + return message_complete_cb(p); +} + +int +pause_response_status_cb (http_parser *p, const char *buf, size_t len) +{ + http_parser_pause(p, 1); + *current_pause_parser = settings_dontcall; + return response_status_cb(p, buf, len); +} + +int +pause_chunk_header_cb (http_parser *p) +{ + http_parser_pause(p, 1); + *current_pause_parser = settings_dontcall; + return chunk_header_cb(p); +} + +int +pause_chunk_complete_cb (http_parser *p) +{ + http_parser_pause(p, 1); + *current_pause_parser = settings_dontcall; + return chunk_complete_cb(p); +} + +static http_parser_settings settings_pause = + {.on_message_begin = 
pause_message_begin_cb + ,.on_header_field = pause_header_field_cb + ,.on_header_value = pause_header_value_cb + ,.on_url = pause_request_url_cb + ,.on_status = pause_response_status_cb + ,.on_body = pause_body_cb + ,.on_headers_complete = pause_headers_complete_cb + ,.on_message_complete = pause_message_complete_cb + ,.on_chunk_header = pause_chunk_header_cb + ,.on_chunk_complete = pause_chunk_complete_cb + }; + +static http_parser_settings settings = + {.on_message_begin = message_begin_cb + ,.on_header_field = header_field_cb + ,.on_header_value = header_value_cb + ,.on_url = request_url_cb + ,.on_status = response_status_cb + ,.on_body = body_cb + ,.on_headers_complete = headers_complete_cb + ,.on_message_complete = message_complete_cb + ,.on_chunk_header = chunk_header_cb + ,.on_chunk_complete = chunk_complete_cb + }; + +static http_parser_settings settings_count_body = + {.on_message_begin = message_begin_cb + ,.on_header_field = header_field_cb + ,.on_header_value = header_value_cb + ,.on_url = request_url_cb + ,.on_status = response_status_cb + ,.on_body = count_body_cb + ,.on_headers_complete = headers_complete_cb + ,.on_message_complete = message_complete_cb + ,.on_chunk_header = chunk_header_cb + ,.on_chunk_complete = chunk_complete_cb + }; + +static http_parser_settings settings_null = + {.on_message_begin = 0 + ,.on_header_field = 0 + ,.on_header_value = 0 + ,.on_url = 0 + ,.on_status = 0 + ,.on_body = 0 + ,.on_headers_complete = 0 + ,.on_message_complete = 0 + ,.on_chunk_header = 0 + ,.on_chunk_complete = 0 + }; + +void +parser_init (enum http_parser_type type) +{ + num_messages = 0; + + assert(parser == NULL); + + parser = malloc(sizeof(http_parser)); + + http_parser_init(parser, type); + + memset(&messages, 0, sizeof messages); + +} + +void +parser_free () +{ + assert(parser); + free(parser); + parser = NULL; +} + +size_t parse (const char *buf, size_t len) +{ + size_t nparsed; + currently_parsing_eof = (len == 0); + nparsed = 
http_parser_execute(parser, &settings, buf, len); + return nparsed; +} + +size_t parse_count_body (const char *buf, size_t len) +{ + size_t nparsed; + currently_parsing_eof = (len == 0); + nparsed = http_parser_execute(parser, &settings_count_body, buf, len); + return nparsed; +} + +size_t parse_pause (const char *buf, size_t len) +{ + size_t nparsed; + http_parser_settings s = settings_pause; + + currently_parsing_eof = (len == 0); + current_pause_parser = &s; + nparsed = http_parser_execute(parser, current_pause_parser, buf, len); + return nparsed; +} + +static inline int +check_str_eq (const struct message *m, + const char *prop, + const char *expected, + const char *found) { + if ((expected == NULL) != (found == NULL)) { + printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); + printf("expected %s\n", (expected == NULL) ? "NULL" : expected); + printf(" found %s\n", (found == NULL) ? "NULL" : found); + return 0; + } + if (expected != NULL && 0 != strcmp(expected, found)) { + printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); + printf("expected '%s'\n", expected); + printf(" found '%s'\n", found); + return 0; + } + return 1; +} + +static inline int +check_num_eq (const struct message *m, + const char *prop, + int expected, + int found) { + if (expected != found) { + printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); + printf("expected %d\n", expected); + printf(" found %d\n", found); + return 0; + } + return 1; +} + +#define MESSAGE_CHECK_STR_EQ(expected, found, prop) \ + if (!check_str_eq(expected, #prop, expected->prop, found->prop)) return 0 + +#define MESSAGE_CHECK_NUM_EQ(expected, found, prop) \ + if (!check_num_eq(expected, #prop, expected->prop, found->prop)) return 0 + +#define MESSAGE_CHECK_URL_EQ(u, expected, found, prop, fn) \ +do { \ + char ubuf[256]; \ + \ + if ((u)->field_set & (1 << (fn))) { \ + memcpy(ubuf, (found)->request_url + (u)->field_data[(fn)].off, \ + (u)->field_data[(fn)].len); \ + ubuf[(u)->field_data[(fn)].len] = 
'\0'; \ + } else { \ + ubuf[0] = '\0'; \ + } \ + \ + check_str_eq(expected, #prop, expected->prop, ubuf); \ +} while(0) + +int +message_eq (int index, const struct message *expected) +{ + int i; + struct message *m = &messages[index]; + + MESSAGE_CHECK_NUM_EQ(expected, m, http_major); + MESSAGE_CHECK_NUM_EQ(expected, m, http_minor); + + if (expected->type == HTTP_REQUEST) { + MESSAGE_CHECK_NUM_EQ(expected, m, method); + } else { + MESSAGE_CHECK_NUM_EQ(expected, m, status_code); + MESSAGE_CHECK_STR_EQ(expected, m, response_status); + } + + MESSAGE_CHECK_NUM_EQ(expected, m, should_keep_alive); + MESSAGE_CHECK_NUM_EQ(expected, m, message_complete_on_eof); + + assert(m->message_begin_cb_called); + assert(m->headers_complete_cb_called); + assert(m->message_complete_cb_called); + + + MESSAGE_CHECK_STR_EQ(expected, m, request_url); + + /* Check URL components; we can't do this w/ CONNECT since it doesn't + * send us a well-formed URL. + */ + if (*m->request_url && m->method != HTTP_CONNECT) { + struct http_parser_url u; + + if (http_parser_parse_url(m->request_url, strlen(m->request_url), 0, &u)) { + fprintf(stderr, "\n\n*** failed to parse URL %s ***\n\n", + m->request_url); + abort(); + } + + if (expected->host) { + MESSAGE_CHECK_URL_EQ(&u, expected, m, host, UF_HOST); + } + + if (expected->userinfo) { + MESSAGE_CHECK_URL_EQ(&u, expected, m, userinfo, UF_USERINFO); + } + + m->port = (u.field_set & (1 << UF_PORT)) ? 
+ u.port : 0; + + MESSAGE_CHECK_URL_EQ(&u, expected, m, query_string, UF_QUERY); + MESSAGE_CHECK_URL_EQ(&u, expected, m, fragment, UF_FRAGMENT); + MESSAGE_CHECK_URL_EQ(&u, expected, m, request_path, UF_PATH); + MESSAGE_CHECK_NUM_EQ(expected, m, port); + } + + if (expected->body_size) { + MESSAGE_CHECK_NUM_EQ(expected, m, body_size); + } else { + MESSAGE_CHECK_STR_EQ(expected, m, body); + } + + assert(m->num_chunks == m->num_chunks_complete); + MESSAGE_CHECK_NUM_EQ(expected, m, num_chunks_complete); + for (i = 0; i < m->num_chunks && i < MAX_CHUNKS; i++) { + MESSAGE_CHECK_NUM_EQ(expected, m, chunk_lengths[i]); + } + + MESSAGE_CHECK_NUM_EQ(expected, m, num_headers); + + int r; + for (i = 0; i < m->num_headers; i++) { + r = check_str_eq(expected, "header field", expected->headers[i][0], m->headers[i][0]); + if (!r) return 0; + r = check_str_eq(expected, "header value", expected->headers[i][1], m->headers[i][1]); + if (!r) return 0; + } + + MESSAGE_CHECK_STR_EQ(expected, m, upgrade); + + return 1; +} + +/* Given a sequence of varargs messages, return the number of them that the + * parser should successfully parse, taking into account that upgraded + * messages prevent all subsequent messages from being parsed. + */ +size_t +count_parsed_messages(const size_t nmsgs, ...) { + size_t i; + va_list ap; + + va_start(ap, nmsgs); + + for (i = 0; i < nmsgs; i++) { + struct message *m = va_arg(ap, struct message *); + + if (m->upgrade) { + va_end(ap); + return i + 1; + } + } + + va_end(ap); + return nmsgs; +} + +/* Given a sequence of bytes and the number of these that we were able to + * parse, verify that upgrade bodies are correct. + */ +void +upgrade_message_fix(char *body, const size_t nread, const size_t nmsgs, ...) 
{ + va_list ap; + size_t i; + size_t off = 0; + + va_start(ap, nmsgs); + + for (i = 0; i < nmsgs; i++) { + struct message *m = va_arg(ap, struct message *); + + off += strlen(m->raw); + + if (m->upgrade) { + off -= strlen(m->upgrade); + + /* Check the portion of the response after its specified upgrade */ + if (!check_str_eq(m, "upgrade", body + off, body + nread)) { + abort(); + } + + /* Fix up the response so that message_eq() will verify the beginning + * of the upgrade */ + *(body + nread + strlen(m->upgrade)) = '\0'; + messages[num_messages -1 ].upgrade = body + nread; + + va_end(ap); + return; + } + } + + va_end(ap); + printf("\n\n*** Error: expected a message with upgrade ***\n"); + + abort(); +} + +static void +print_error (const char *raw, size_t error_location) +{ + fprintf(stderr, "\n*** %s ***\n\n", + http_errno_description(HTTP_PARSER_ERRNO(parser))); + + int this_line = 0, char_len = 0; + size_t i, j, len = strlen(raw), error_location_line = 0; + for (i = 0; i < len; i++) { + if (i == error_location) this_line = 1; + switch (raw[i]) { + case '\r': + char_len = 2; + fprintf(stderr, "\\r"); + break; + + case '\n': + fprintf(stderr, "\\n\n"); + + if (this_line) goto print; + + error_location_line = 0; + continue; + + default: + char_len = 1; + fputc(raw[i], stderr); + break; + } + if (!this_line) error_location_line += char_len; + } + + fprintf(stderr, "[eof]\n"); + + print: + for (j = 0; j < error_location_line; j++) { + fputc(' ', stderr); + } + fprintf(stderr, "^\n\nerror location: %u\n", (unsigned int)error_location); +} + +void +test_preserve_data (void) +{ + char my_data[] = "application-specific data"; + http_parser parser; + parser.data = my_data; + http_parser_init(&parser, HTTP_REQUEST); + if (parser.data != my_data) { + printf("\n*** parser.data not preserved accross http_parser_init ***\n\n"); + abort(); + } +} + +struct url_test { + const char *name; + const char *url; + int is_connect; + struct http_parser_url u; + int rv; +}; + +const 
struct url_test url_tests[] = +{ {.name="proxy request" + ,.url="http://hostname/" + ,.is_connect=0 + ,.u= + {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) + ,.port=0 + ,.field_data= + {{ 0, 4 } /* UF_SCHEMA */ + ,{ 7, 8 } /* UF_HOST */ + ,{ 0, 0 } /* UF_PORT */ + ,{ 15, 1 } /* UF_PATH */ + ,{ 0, 0 } /* UF_QUERY */ + ,{ 0, 0 } /* UF_FRAGMENT */ + ,{ 0, 0 } /* UF_USERINFO */ + } + } + ,.rv=0 + } + +, {.name="proxy request with port" + ,.url="http://hostname:444/" + ,.is_connect=0 + ,.u= + {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PORT) | (1 << UF_PATH) + ,.port=444 + ,.field_data= + {{ 0, 4 } /* UF_SCHEMA */ + ,{ 7, 8 } /* UF_HOST */ + ,{ 16, 3 } /* UF_PORT */ + ,{ 19, 1 } /* UF_PATH */ + ,{ 0, 0 } /* UF_QUERY */ + ,{ 0, 0 } /* UF_FRAGMENT */ + ,{ 0, 0 } /* UF_USERINFO */ + } + } + ,.rv=0 + } + +, {.name="CONNECT request" + ,.url="hostname:443" + ,.is_connect=1 + ,.u= + {.field_set=(1 << UF_HOST) | (1 << UF_PORT) + ,.port=443 + ,.field_data= + {{ 0, 0 } /* UF_SCHEMA */ + ,{ 0, 8 } /* UF_HOST */ + ,{ 9, 3 } /* UF_PORT */ + ,{ 0, 0 } /* UF_PATH */ + ,{ 0, 0 } /* UF_QUERY */ + ,{ 0, 0 } /* UF_FRAGMENT */ + ,{ 0, 0 } /* UF_USERINFO */ + } + } + ,.rv=0 + } + +, {.name="CONNECT request but not connect" + ,.url="hostname:443" + ,.is_connect=0 + ,.rv=1 + } + +, {.name="proxy ipv6 request" + ,.url="http://[1:2::3:4]/" + ,.is_connect=0 + ,.u= + {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) + ,.port=0 + ,.field_data= + {{ 0, 4 } /* UF_SCHEMA */ + ,{ 8, 8 } /* UF_HOST */ + ,{ 0, 0 } /* UF_PORT */ + ,{ 17, 1 } /* UF_PATH */ + ,{ 0, 0 } /* UF_QUERY */ + ,{ 0, 0 } /* UF_FRAGMENT */ + ,{ 0, 0 } /* UF_USERINFO */ + } + } + ,.rv=0 + } + +, {.name="proxy ipv6 request with port" + ,.url="http://[1:2::3:4]:67/" + ,.is_connect=0 + ,.u= + {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PORT) | (1 << UF_PATH) + ,.port=67 + ,.field_data= + {{ 0, 4 } /* UF_SCHEMA */ + ,{ 8, 8 } /* UF_HOST */ + ,{ 18, 2 } /* UF_PORT */ + ,{ 20, 1 } /* 
UF_PATH */ + ,{ 0, 0 } /* UF_QUERY */ + ,{ 0, 0 } /* UF_FRAGMENT */ + ,{ 0, 0 } /* UF_USERINFO */ + } + } + ,.rv=0 + } + +, {.name="CONNECT ipv6 address" + ,.url="[1:2::3:4]:443" + ,.is_connect=1 + ,.u= + {.field_set=(1 << UF_HOST) | (1 << UF_PORT) + ,.port=443 + ,.field_data= + {{ 0, 0 } /* UF_SCHEMA */ + ,{ 1, 8 } /* UF_HOST */ + ,{ 11, 3 } /* UF_PORT */ + ,{ 0, 0 } /* UF_PATH */ + ,{ 0, 0 } /* UF_QUERY */ + ,{ 0, 0 } /* UF_FRAGMENT */ + ,{ 0, 0 } /* UF_USERINFO */ + } + } + ,.rv=0 + } + +, {.name="ipv4 in ipv6 address" + ,.url="http://[2001:0000:0000:0000:0000:0000:1.9.1.1]/" + ,.is_connect=0 + ,.u= + {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) + ,.port=0 + ,.field_data= + {{ 0, 4 } /* UF_SCHEMA */ + ,{ 8, 37 } /* UF_HOST */ + ,{ 0, 0 } /* UF_PORT */ + ,{ 46, 1 } /* UF_PATH */ + ,{ 0, 0 } /* UF_QUERY */ + ,{ 0, 0 } /* UF_FRAGMENT */ + ,{ 0, 0 } /* UF_USERINFO */ + } + } + ,.rv=0 + } + +, {.name="extra ? in query string" + ,.url="http://a.tbcdn.cn/p/fp/2010c/??fp-header-min.css,fp-base-min.css," + "fp-channel-min.css,fp-product-min.css,fp-mall-min.css,fp-category-min.css," + "fp-sub-min.css,fp-gdp4p-min.css,fp-css3-min.css,fp-misc-min.css?t=20101022.css" + ,.is_connect=0 + ,.u= + {.field_set=(1<field_set, u->port); + for (i = 0; i < UF_MAX; i++) { + if ((u->field_set & (1 << i)) == 0) { + printf("\tfield_data[%u]: unset\n", i); + continue; + } + + printf("\tfield_data[%u]: off: %u len: %u part: \"%.*s\n\"", + i, + u->field_data[i].off, + u->field_data[i].len, + u->field_data[i].len, + url + u->field_data[i].off); + } +} + +void +test_parse_url (void) +{ + struct http_parser_url u; + const struct url_test *test; + unsigned int i; + int rv; + + for (i = 0; i < (sizeof(url_tests) / sizeof(url_tests[0])); i++) { + test = &url_tests[i]; + memset(&u, 0, sizeof(u)); + + rv = http_parser_parse_url(test->url, + strlen(test->url), + test->is_connect, + &u); + + if (test->rv == 0) { + if (rv != 0) { + printf("\n*** http_parser_parse_url(\"%s\") \"%s\" 
test failed, " + "unexpected rv %d ***\n\n", test->url, test->name, rv); + abort(); + } + + if (memcmp(&u, &test->u, sizeof(u)) != 0) { + printf("\n*** http_parser_parse_url(\"%s\") \"%s\" failed ***\n", + test->url, test->name); + + printf("target http_parser_url:\n"); + dump_url(test->url, &test->u); + printf("result http_parser_url:\n"); + dump_url(test->url, &u); + + abort(); + } + } else { + /* test->rv != 0 */ + if (rv == 0) { + printf("\n*** http_parser_parse_url(\"%s\") \"%s\" test failed, " + "unexpected rv %d ***\n\n", test->url, test->name, rv); + abort(); + } + } + } +} + +void +test_method_str (void) +{ + assert(0 == strcmp("GET", http_method_str(HTTP_GET))); + assert(0 == strcmp("", http_method_str(1337))); +} + +void +test_message (const struct message *message) +{ + size_t raw_len = strlen(message->raw); + size_t msg1len; + for (msg1len = 0; msg1len < raw_len; msg1len++) { + parser_init(message->type); + + size_t read; + const char *msg1 = message->raw; + const char *msg2 = msg1 + msg1len; + size_t msg2len = raw_len - msg1len; + + if (msg1len) { + read = parse(msg1, msg1len); + + if (message->upgrade && parser->upgrade && num_messages > 0) { + messages[num_messages - 1].upgrade = msg1 + read; + goto test; + } + + if (read != msg1len) { + print_error(msg1, read); + abort(); + } + } + + + read = parse(msg2, msg2len); + + if (message->upgrade && parser->upgrade) { + messages[num_messages - 1].upgrade = msg2 + read; + goto test; + } + + if (read != msg2len) { + print_error(msg2, read); + abort(); + } + + read = parse(NULL, 0); + + if (read != 0) { + print_error(message->raw, read); + abort(); + } + + test: + + if (num_messages != 1) { + printf("\n*** num_messages != 1 after testing '%s' ***\n\n", message->name); + abort(); + } + + if(!message_eq(0, message)) abort(); + + parser_free(); + } +} + +void +test_message_count_body (const struct message *message) +{ + parser_init(message->type); + + size_t read; + size_t l = strlen(message->raw); + size_t i, 
toread; + size_t chunk = 4024; + + for (i = 0; i < l; i+= chunk) { + toread = MIN(l-i, chunk); + read = parse_count_body(message->raw + i, toread); + if (read != toread) { + print_error(message->raw, read); + abort(); + } + } + + + read = parse_count_body(NULL, 0); + if (read != 0) { + print_error(message->raw, read); + abort(); + } + + if (num_messages != 1) { + printf("\n*** num_messages != 1 after testing '%s' ***\n\n", message->name); + abort(); + } + + if(!message_eq(0, message)) abort(); + + parser_free(); +} + +void +test_simple (const char *buf, enum http_errno err_expected) +{ + parser_init(HTTP_REQUEST); + + enum http_errno err; + + parse(buf, strlen(buf)); + err = HTTP_PARSER_ERRNO(parser); + parse(NULL, 0); + + parser_free(); + + /* In strict mode, allow us to pass with an unexpected HPE_STRICT as + * long as the caller isn't expecting success. + */ +#if HTTP_PARSER_STRICT + if (err_expected != err && err_expected != HPE_OK && err != HPE_STRICT) { +#else + if (err_expected != err) { +#endif + fprintf(stderr, "\n*** test_simple expected %s, but saw %s ***\n\n%s\n", + http_errno_name(err_expected), http_errno_name(err), buf); + abort(); + } +} + +void +test_header_overflow_error (int req) +{ + http_parser parser; + http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); + size_t parsed; + const char *buf; + buf = req ? 
"GET / HTTP/1.1\r\n" : "HTTP/1.0 200 OK\r\n"; + parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); + assert(parsed == strlen(buf)); + + buf = "header-key: header-value\r\n"; + size_t buflen = strlen(buf); + + int i; + for (i = 0; i < 10000; i++) { + parsed = http_parser_execute(&parser, &settings_null, buf, buflen); + if (parsed != buflen) { + //fprintf(stderr, "error found on iter %d\n", i); + assert(HTTP_PARSER_ERRNO(&parser) == HPE_HEADER_OVERFLOW); + return; + } + } + + fprintf(stderr, "\n*** Error expected but none in header overflow test ***\n"); + abort(); +} + + +void +test_header_nread_value () +{ + http_parser parser; + http_parser_init(&parser, HTTP_REQUEST); + size_t parsed; + const char *buf; + buf = "GET / HTTP/1.1\r\nheader: value\nhdr: value\r\n"; + parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); + assert(parsed == strlen(buf)); + + assert(parser.nread == strlen(buf)); +} + + +static void +test_content_length_overflow (const char *buf, size_t buflen, int expect_ok) +{ + http_parser parser; + http_parser_init(&parser, HTTP_RESPONSE); + http_parser_execute(&parser, &settings_null, buf, buflen); + + if (expect_ok) + assert(HTTP_PARSER_ERRNO(&parser) == HPE_OK); + else + assert(HTTP_PARSER_ERRNO(&parser) == HPE_INVALID_CONTENT_LENGTH); +} + +void +test_header_content_length_overflow_error (void) +{ +#define X(size) \ + "HTTP/1.1 200 OK\r\n" \ + "Content-Length: " #size "\r\n" \ + "\r\n" + const char a[] = X(1844674407370955160); /* 2^64 / 10 - 1 */ + const char b[] = X(18446744073709551615); /* 2^64-1 */ + const char c[] = X(18446744073709551616); /* 2^64 */ +#undef X + test_content_length_overflow(a, sizeof(a) - 1, 1); /* expect ok */ + test_content_length_overflow(b, sizeof(b) - 1, 0); /* expect failure */ + test_content_length_overflow(c, sizeof(c) - 1, 0); /* expect failure */ +} + +void +test_chunk_content_length_overflow_error (void) +{ +#define X(size) \ + "HTTP/1.1 200 OK\r\n" \ + 
"Transfer-Encoding: chunked\r\n" \ + "\r\n" \ + #size "\r\n" \ + "..." + const char a[] = X(FFFFFFFFFFFFFFE); /* 2^64 / 16 - 1 */ + const char b[] = X(FFFFFFFFFFFFFFFF); /* 2^64-1 */ + const char c[] = X(10000000000000000); /* 2^64 */ +#undef X + test_content_length_overflow(a, sizeof(a) - 1, 1); /* expect ok */ + test_content_length_overflow(b, sizeof(b) - 1, 0); /* expect failure */ + test_content_length_overflow(c, sizeof(c) - 1, 0); /* expect failure */ +} + +void +test_no_overflow_long_body (int req, size_t length) +{ + http_parser parser; + http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); + size_t parsed; + size_t i; + char buf1[3000]; + size_t buf1len = sprintf(buf1, "%s\r\nConnection: Keep-Alive\r\nContent-Length: %lu\r\n\r\n", + req ? "POST / HTTP/1.0" : "HTTP/1.0 200 OK", (unsigned long)length); + parsed = http_parser_execute(&parser, &settings_null, buf1, buf1len); + if (parsed != buf1len) + goto err; + + for (i = 0; i < length; i++) { + char foo = 'a'; + parsed = http_parser_execute(&parser, &settings_null, &foo, 1); + if (parsed != 1) + goto err; + } + + parsed = http_parser_execute(&parser, &settings_null, buf1, buf1len); + if (parsed != buf1len) goto err; + return; + + err: + fprintf(stderr, + "\n*** error in test_no_overflow_long_body %s of length %lu ***\n", + req ? 
"REQUEST" : "RESPONSE", + (unsigned long)length); + abort(); +} + +void +test_multiple3 (const struct message *r1, const struct message *r2, const struct message *r3) +{ + int message_count = count_parsed_messages(3, r1, r2, r3); + + char total[ strlen(r1->raw) + + strlen(r2->raw) + + strlen(r3->raw) + + 1 + ]; + total[0] = '\0'; + + strcat(total, r1->raw); + strcat(total, r2->raw); + strcat(total, r3->raw); + + parser_init(r1->type); + + size_t read; + + read = parse(total, strlen(total)); + + if (parser->upgrade) { + upgrade_message_fix(total, read, 3, r1, r2, r3); + goto test; + } + + if (read != strlen(total)) { + print_error(total, read); + abort(); + } + + read = parse(NULL, 0); + + if (read != 0) { + print_error(total, read); + abort(); + } + +test: + + if (message_count != num_messages) { + fprintf(stderr, "\n\n*** Parser didn't see 3 messages only %d *** \n", num_messages); + abort(); + } + + if (!message_eq(0, r1)) abort(); + if (message_count > 1 && !message_eq(1, r2)) abort(); + if (message_count > 2 && !message_eq(2, r3)) abort(); + + parser_free(); +} + +/* SCAN through every possible breaking to make sure the + * parser can handle getting the content in any chunks that + * might come from the socket + */ +void +test_scan (const struct message *r1, const struct message *r2, const struct message *r3) +{ + char total[80*1024] = "\0"; + char buf1[80*1024] = "\0"; + char buf2[80*1024] = "\0"; + char buf3[80*1024] = "\0"; + + strcat(total, r1->raw); + strcat(total, r2->raw); + strcat(total, r3->raw); + + size_t read; + + int total_len = strlen(total); + + int total_ops = 2 * (total_len - 1) * (total_len - 2) / 2; + int ops = 0 ; + + size_t buf1_len, buf2_len, buf3_len; + int message_count = count_parsed_messages(3, r1, r2, r3); + + int i,j,type_both; + for (type_both = 0; type_both < 2; type_both ++ ) { + for (j = 2; j < total_len; j ++ ) { + for (i = 1; i < j; i ++ ) { + + if (ops % 1000 == 0) { + printf("\b\b\b\b%3.0f%%", 100 * (float)ops 
/(float)total_ops); + fflush(stdout); + } + ops += 1; + + parser_init(type_both ? HTTP_BOTH : r1->type); + + buf1_len = i; + strlncpy(buf1, sizeof(buf1), total, buf1_len); + buf1[buf1_len] = 0; + + buf2_len = j - i; + strlncpy(buf2, sizeof(buf1), total+i, buf2_len); + buf2[buf2_len] = 0; + + buf3_len = total_len - j; + strlncpy(buf3, sizeof(buf1), total+j, buf3_len); + buf3[buf3_len] = 0; + + read = parse(buf1, buf1_len); + + if (parser->upgrade) goto test; + + if (read != buf1_len) { + print_error(buf1, read); + goto error; + } + + read += parse(buf2, buf2_len); + + if (parser->upgrade) goto test; + + if (read != buf1_len + buf2_len) { + print_error(buf2, read); + goto error; + } + + read += parse(buf3, buf3_len); + + if (parser->upgrade) goto test; + + if (read != buf1_len + buf2_len + buf3_len) { + print_error(buf3, read); + goto error; + } + + parse(NULL, 0); + +test: + if (parser->upgrade) { + upgrade_message_fix(total, read, 3, r1, r2, r3); + } + + if (message_count != num_messages) { + fprintf(stderr, "\n\nParser didn't see %d messages only %d\n", + message_count, num_messages); + goto error; + } + + if (!message_eq(0, r1)) { + fprintf(stderr, "\n\nError matching messages[0] in test_scan.\n"); + goto error; + } + + if (message_count > 1 && !message_eq(1, r2)) { + fprintf(stderr, "\n\nError matching messages[1] in test_scan.\n"); + goto error; + } + + if (message_count > 2 && !message_eq(2, r3)) { + fprintf(stderr, "\n\nError matching messages[2] in test_scan.\n"); + goto error; + } + + parser_free(); + } + } + } + puts("\b\b\b\b100%"); + return; + + error: + fprintf(stderr, "i=%d j=%d\n", i, j); + fprintf(stderr, "buf1 (%u) %s\n\n", (unsigned int)buf1_len, buf1); + fprintf(stderr, "buf2 (%u) %s\n\n", (unsigned int)buf2_len , buf2); + fprintf(stderr, "buf3 (%u) %s\n", (unsigned int)buf3_len, buf3); + abort(); +} + +// user required to free the result +// string terminated by \0 +char * +create_large_chunked_message (int body_size_in_kb, const char* headers) 
+{ + int i; + size_t wrote = 0; + size_t headers_len = strlen(headers); + size_t bufsize = headers_len + (5+1024+2)*body_size_in_kb + 6; + char * buf = malloc(bufsize); + + memcpy(buf, headers, headers_len); + wrote += headers_len; + + for (i = 0; i < body_size_in_kb; i++) { + // write 1kb chunk into the body. + memcpy(buf + wrote, "400\r\n", 5); + wrote += 5; + memset(buf + wrote, 'C', 1024); + wrote += 1024; + strcpy(buf + wrote, "\r\n"); + wrote += 2; + } + + memcpy(buf + wrote, "0\r\n\r\n", 6); + wrote += 6; + assert(wrote == bufsize); + + return buf; +} + +/* Verify that we can pause parsing at any of the bytes in the + * message and still get the result that we're expecting. */ +void +test_message_pause (const struct message *msg) +{ + char *buf = (char*) msg->raw; + size_t buflen = strlen(msg->raw); + size_t nread; + + parser_init(msg->type); + + do { + nread = parse_pause(buf, buflen); + + // We can only set the upgrade buffer once we've gotten our message + // completion callback. 
+ if (messages[0].message_complete_cb_called && + msg->upgrade && + parser->upgrade) { + messages[0].upgrade = buf + nread; + goto test; + } + + if (nread < buflen) { + + // Not much do to if we failed a strict-mode check + if (HTTP_PARSER_ERRNO(parser) == HPE_STRICT) { + parser_free(); + return; + } + + assert (HTTP_PARSER_ERRNO(parser) == HPE_PAUSED); + } + + buf += nread; + buflen -= nread; + http_parser_pause(parser, 0); + } while (buflen > 0); + + nread = parse_pause(NULL, 0); + assert (nread == 0); + +test: + if (num_messages != 1) { + printf("\n*** num_messages != 1 after testing '%s' ***\n\n", msg->name); + abort(); + } + + if(!message_eq(0, msg)) abort(); + + parser_free(); +} + +int +main (void) +{ + parser = NULL; + int i, j, k; + int request_count; + int response_count; + unsigned long version; + unsigned major; + unsigned minor; + unsigned patch; + + version = http_parser_version(); + major = (version >> 16) & 255; + minor = (version >> 8) & 255; + patch = version & 255; + printf("http_parser v%u.%u.%u (0x%06lx)\n", major, minor, patch, version); + + printf("sizeof(http_parser) = %u\n", (unsigned int)sizeof(http_parser)); + + for (request_count = 0; requests[request_count].name; request_count++); + for (response_count = 0; responses[response_count].name; response_count++); + + //// API + test_preserve_data(); + test_parse_url(); + test_method_str(); + + //// NREAD + test_header_nread_value(); + + //// OVERFLOW CONDITIONS + + test_header_overflow_error(HTTP_REQUEST); + test_no_overflow_long_body(HTTP_REQUEST, 1000); + test_no_overflow_long_body(HTTP_REQUEST, 100000); + + test_header_overflow_error(HTTP_RESPONSE); + test_no_overflow_long_body(HTTP_RESPONSE, 1000); + test_no_overflow_long_body(HTTP_RESPONSE, 100000); + + test_header_content_length_overflow_error(); + test_chunk_content_length_overflow_error(); + + //// RESPONSES + + for (i = 0; i < response_count; i++) { + test_message(&responses[i]); + } + + for (i = 0; i < response_count; i++) { + 
test_message_pause(&responses[i]); + } + + for (i = 0; i < response_count; i++) { + if (!responses[i].should_keep_alive) continue; + for (j = 0; j < response_count; j++) { + if (!responses[j].should_keep_alive) continue; + for (k = 0; k < response_count; k++) { + test_multiple3(&responses[i], &responses[j], &responses[k]); + } + } + } + + test_message_count_body(&responses[NO_HEADERS_NO_BODY_404]); + test_message_count_body(&responses[TRAILING_SPACE_ON_CHUNKED_BODY]); + + // test very large chunked response + { + char * msg = create_large_chunked_message(31337, + "HTTP/1.0 200 OK\r\n" + "Transfer-Encoding: chunked\r\n" + "Content-Type: text/plain\r\n" + "\r\n"); + struct message large_chunked = + {.name= "large chunked" + ,.type= HTTP_RESPONSE + ,.raw= msg + ,.should_keep_alive= FALSE + ,.message_complete_on_eof= FALSE + ,.http_major= 1 + ,.http_minor= 0 + ,.status_code= 200 + ,.response_status= "OK" + ,.num_headers= 2 + ,.headers= + { { "Transfer-Encoding", "chunked" } + , { "Content-Type", "text/plain" } + } + ,.body_size= 31337*1024 + ,.num_chunks_complete= 31338 + }; + for (i = 0; i < MAX_CHUNKS; i++) { + large_chunked.chunk_lengths[i] = 1024; + } + test_message_count_body(&large_chunked); + free(msg); + } + + + + printf("response scan 1/2 "); + test_scan( &responses[TRAILING_SPACE_ON_CHUNKED_BODY] + , &responses[NO_BODY_HTTP10_KA_204] + , &responses[NO_REASON_PHRASE] + ); + + printf("response scan 2/2 "); + test_scan( &responses[BONJOUR_MADAME_FR] + , &responses[UNDERSTORE_HEADER_KEY] + , &responses[NO_CARRIAGE_RET] + ); + + puts("responses okay"); + + + /// REQUESTS + + test_simple("GET / HTP/1.1\r\n\r\n", HPE_INVALID_VERSION); + + // Well-formed but incomplete + test_simple("GET / HTTP/1.1\r\n" + "Content-Type: text/plain\r\n" + "Content-Length: 6\r\n" + "\r\n" + "fooba", + HPE_OK); + + static const char *all_methods[] = { + "DELETE", + "GET", + "HEAD", + "POST", + "PUT", + //"CONNECT", //CONNECT can't be tested like other methods, it's a tunnel + "OPTIONS", 
+ "TRACE", + "COPY", + "LOCK", + "MKCOL", + "MOVE", + "PROPFIND", + "PROPPATCH", + "UNLOCK", + "REPORT", + "MKACTIVITY", + "CHECKOUT", + "MERGE", + "M-SEARCH", + "NOTIFY", + "SUBSCRIBE", + "UNSUBSCRIBE", + "PATCH", + 0 }; + const char **this_method; + for (this_method = all_methods; *this_method; this_method++) { + char buf[200]; + sprintf(buf, "%s / HTTP/1.1\r\n\r\n", *this_method); + test_simple(buf, HPE_OK); + } + + static const char *bad_methods[] = { + "ASDF", + "C******", + "COLA", + "GEM", + "GETA", + "M****", + "MKCOLA", + "PROPPATCHA", + "PUN", + "PX", + "SA", + "hello world", + 0 }; + for (this_method = bad_methods; *this_method; this_method++) { + char buf[200]; + sprintf(buf, "%s / HTTP/1.1\r\n\r\n", *this_method); + test_simple(buf, HPE_INVALID_METHOD); + } + + // illegal header field name line folding + test_simple("GET / HTTP/1.1\r\n" + "name\r\n" + " : value\r\n" + "\r\n", + HPE_INVALID_HEADER_TOKEN); + + const char *dumbfuck2 = + "GET / HTTP/1.1\r\n" + "X-SSL-Bullshit: -----BEGIN CERTIFICATE-----\r\n" + "\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n" + "\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n" + "\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n" + "\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n" + "\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n" + "\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n" + "\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n" + "\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n" + "\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n" + "\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n" + "\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n" + "\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n" + 
"\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgHTTPAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n" + "\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n" + "\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n" + "\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n" + "\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n" + "\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n" + "\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n" + "\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n" + "\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n" + "\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n" + "\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n" + "\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n" + "\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n" + "\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n" + "\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n" + "\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n" + "\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n" + "\tRA==\r\n" + "\t-----END CERTIFICATE-----\r\n" + "\r\n"; + test_simple(dumbfuck2, HPE_OK); + + const char *corrupted_connection = + "GET / HTTP/1.1\r\n" + "Host: www.example.com\r\n" + "Connection\r\033\065\325eep-Alive\r\n" + "Accept-Encoding: gzip\r\n" + "\r\n"; + test_simple(corrupted_connection, HPE_INVALID_HEADER_TOKEN); + + const char *corrupted_header_name = + "GET / HTTP/1.1\r\n" + "Host: www.example.com\r\n" + "X-Some-Header\r\033\065\325eep-Alive\r\n" + "Accept-Encoding: gzip\r\n" + "\r\n"; + test_simple(corrupted_header_name, HPE_INVALID_HEADER_TOKEN); + +#if 0 + // NOTE(Wed Nov 18 11:57:27 CET 2009) this seems okay. we just read body + // until EOF. 
+ // + // no content-length + // error if there is a body without content length + const char *bad_get_no_headers_no_body = "GET /bad_get_no_headers_no_body/world HTTP/1.1\r\n" + "Accept: */*\r\n" + "\r\n" + "HELLO"; + test_simple(bad_get_no_headers_no_body, 0); +#endif + /* TODO sending junk and large headers gets rejected */ + + + /* check to make sure our predefined requests are okay */ + for (i = 0; requests[i].name; i++) { + test_message(&requests[i]); + } + + for (i = 0; i < request_count; i++) { + test_message_pause(&requests[i]); + } + + for (i = 0; i < request_count; i++) { + if (!requests[i].should_keep_alive) continue; + for (j = 0; j < request_count; j++) { + if (!requests[j].should_keep_alive) continue; + for (k = 0; k < request_count; k++) { + test_multiple3(&requests[i], &requests[j], &requests[k]); + } + } + } + + printf("request scan 1/4 "); + test_scan( &requests[GET_NO_HEADERS_NO_BODY] + , &requests[GET_ONE_HEADER_NO_BODY] + , &requests[GET_NO_HEADERS_NO_BODY] + ); + + printf("request scan 2/4 "); + test_scan( &requests[POST_CHUNKED_ALL_YOUR_BASE] + , &requests[POST_IDENTITY_BODY_WORLD] + , &requests[GET_FUNKY_CONTENT_LENGTH] + ); + + printf("request scan 3/4 "); + test_scan( &requests[TWO_CHUNKS_MULT_ZERO_END] + , &requests[CHUNKED_W_TRAILING_HEADERS] + , &requests[CHUNKED_W_BULLSHIT_AFTER_LENGTH] + ); + + printf("request scan 4/4 "); + test_scan( &requests[QUERY_URL_WITH_QUESTION_MARK_GET] + , &requests[PREFIX_NEWLINE_GET ] + , &requests[CONNECT_REQUEST] + ); + + puts("requests okay"); + + return 0; +} diff --git a/vendor/libgit2 b/vendor/libgit2 new file mode 160000 index 000000000..37dba1a73 --- /dev/null +++ b/vendor/libgit2 @@ -0,0 +1 @@ +Subproject commit 37dba1a739b5ee6c45dc9f3c0bd1f7f7a18f13f7 diff --git a/vendor/libgit2.gyp b/vendor/libgit2.gyp new file mode 100644 index 000000000..06530bdaa --- /dev/null +++ b/vendor/libgit2.gyp @@ -0,0 +1,510 @@ +{ + # Copyright (c) 2012 The Chromium Authors. All rights reserved. 
+ # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + "variables": { + "target_arch%": "x86", + "library%": "static_library", + "openssl_enable_asm%": 0, # only supported with the Visual Studio 2012 (VC11) toolchain. + "gcc_version%": 0, + "is_clang%": 0 + }, + "targets": [ + { + "target_name": "libgit2", + "type": "static_library", + "defines": [ + "GIT_THREADS", + "GIT_SSH", + "GIT_SSH_MEMORY_CREDENTIALS", + # Node's util.h may be accidentally included so use this to guard + # against compilation error. + "SRC_UTIL_H_", + ], + "dependencies": [ + "zlib", + "http_parser/http_parser.gyp:http_parser", + "openssl/openssl.gyp:openssl", + "libssh2" + ], + "sources": [ + "libgit2/include/git2/sys/hashsig.h", + "libgit2/include/git2/sys/merge.h", + "libgit2/src/annotated_commit.c", + "libgit2/src/annotated_commit.h", + "libgit2/src/array.h", + "libgit2/src/attr_file.c", + "libgit2/src/attr_file.h", + "libgit2/src/attr.c", + "libgit2/src/attr.h", + "libgit2/src/attrcache.c", + "libgit2/src/attrcache.h", + "libgit2/src/bitvec.h", + "libgit2/src/blame_git.c", + "libgit2/src/blame_git.h", + "libgit2/src/blame.c", + "libgit2/src/blame.h", + "libgit2/src/blob.c", + "libgit2/src/blob.h", + "libgit2/src/branch.c", + "libgit2/src/branch.h", + "libgit2/src/bswap.h", + "libgit2/src/buf_text.c", + "libgit2/src/buf_text.h", + "libgit2/src/buffer.c", + "libgit2/src/buffer.h", + "libgit2/src/cache.c", + "libgit2/src/cache.h", + "libgit2/src/cc-compat.h", + "libgit2/src/checkout.c", + "libgit2/src/checkout.h", + "libgit2/src/cherrypick.c", + "libgit2/src/clone.c", + "libgit2/src/clone.h", + "libgit2/src/commit_list.c", + "libgit2/src/commit_list.h", + "libgit2/src/commit.c", + "libgit2/src/commit.h", + "libgit2/src/common.h", + "libgit2/src/config_cache.c", + "libgit2/src/config_file.c", + "libgit2/src/config_file.h", + "libgit2/src/config.c", + "libgit2/src/config.h", + "libgit2/src/crlf.c", + "libgit2/src/date.c", + 
"libgit2/src/delta-apply.c", + "libgit2/src/delta-apply.h", + "libgit2/src/delta.c", + "libgit2/src/delta.h", + "libgit2/src/diff_driver.c", + "libgit2/src/diff_driver.h", + "libgit2/src/diff_file.c", + "libgit2/src/diff_file.h", + "libgit2/src/diff_patch.c", + "libgit2/src/diff_patch.h", + "libgit2/src/diff_print.c", + "libgit2/src/diff_stats.c", + "libgit2/src/diff_tform.c", + "libgit2/src/diff_xdiff.c", + "libgit2/src/diff_xdiff.h", + "libgit2/src/diff.c", + "libgit2/src/diff.h", + "libgit2/src/errors.c", + "libgit2/src/fetch.c", + "libgit2/src/fetch.h", + "libgit2/src/fetchhead.c", + "libgit2/src/fetchhead.h", + "libgit2/src/filebuf.c", + "libgit2/src/filebuf.h", + "libgit2/src/fileops.c", + "libgit2/src/fileops.h", + "libgit2/src/filter.c", + "libgit2/src/filter.h", + "libgit2/src/fnmatch.c", + "libgit2/src/fnmatch.h", + "libgit2/src/global.c", + "libgit2/src/global.h", + "libgit2/src/graph.c", + "libgit2/src/hash.c", + "libgit2/src/hash.h", + "libgit2/src/hash/hash_generic.c", + "libgit2/src/hash/hash_generic.h", + "libgit2/src/hash/hash_openssl.h", + "libgit2/src/hashsig.c", + "libgit2/src/ident.c", + "libgit2/src/ignore.c", + "libgit2/src/ignore.h", + "libgit2/src/index.c", + "libgit2/src/index.h", + "libgit2/src/indexer.c", + "libgit2/src/iterator.c", + "libgit2/src/iterator.h", + "libgit2/src/khash.h", + "libgit2/src/map.h", + "libgit2/src/merge_file.c", + "libgit2/src/merge_file.h", + "libgit2/src/merge.c", + "libgit2/src/merge_driver.c", + "libgit2/src/merge.h", + "libgit2/src/message.c", + "libgit2/src/message.h", + "libgit2/src/mwindow.c", + "libgit2/src/mwindow.h", + "libgit2/src/netops.c", + "libgit2/src/netops.h", + "libgit2/src/notes.c", + "libgit2/src/notes.h", + "libgit2/src/object_api.c", + "libgit2/src/object.c", + "libgit2/src/object.h", + "libgit2/src/odb_loose.c", + "libgit2/src/odb_mempack.c", + "libgit2/src/odb_pack.c", + "libgit2/src/odb.c", + "libgit2/src/odb.h", + "libgit2/src/offmap.h", + "libgit2/src/oid.c", + "libgit2/src/oid.h", + 
"libgit2/src/oidarray.c", + "libgit2/src/oidarray.h", + "libgit2/src/oidmap.h", + "libgit2/src/openssl_stream.c", + "libgit2/src/openssl_stream.h", + "libgit2/src/pack-objects.c", + "libgit2/src/pack-objects.h", + "libgit2/src/pack.c", + "libgit2/src/pack.h", + "libgit2/src/path.c", + "libgit2/src/path.h", + "libgit2/src/pathspec.c", + "libgit2/src/pathspec.h", + "libgit2/src/pool.c", + "libgit2/src/pool.h", + "libgit2/src/posix.c", + "libgit2/src/posix.h", + "libgit2/src/pqueue.c", + "libgit2/src/pqueue.h", + "libgit2/src/proxy.c", + "libgit2/src/push.c", + "libgit2/src/push.h", + "libgit2/src/rebase.c", + "libgit2/src/refdb_fs.c", + "libgit2/src/refdb_fs.h", + "libgit2/src/refdb.c", + "libgit2/src/refdb.h", + "libgit2/src/reflog.c", + "libgit2/src/reflog.h", + "libgit2/src/refs.c", + "libgit2/src/refs.h", + "libgit2/src/refspec.c", + "libgit2/src/refspec.h", + "libgit2/src/remote.c", + "libgit2/src/remote.h", + "libgit2/src/repo_template.h", + "libgit2/src/repository.c", + "libgit2/src/repository.h", + "libgit2/src/reset.c", + "libgit2/src/revert.c", + "libgit2/src/revparse.c", + "libgit2/src/revwalk.c", + "libgit2/src/revwalk.h", + "libgit2/src/settings.c", + "libgit2/src/sha1_lookup.c", + "libgit2/src/sha1_lookup.h", + "libgit2/src/signature.c", + "libgit2/src/signature.h", + "libgit2/src/socket_stream.c", + "libgit2/src/socket_stream.h", + "libgit2/src/sortedcache.c", + "libgit2/src/sortedcache.h", + "libgit2/src/stash.c", + "libgit2/src/status.c", + "libgit2/src/status.h", + "libgit2/src/strmap.c", + "libgit2/src/strmap.h", + "libgit2/src/strnlen.h", + "libgit2/src/submodule.c", + "libgit2/src/submodule.h", + "libgit2/src/sysdir.c", + "libgit2/src/sysdir.h", + "libgit2/src/tag.c", + "libgit2/src/tag.h", + "libgit2/src/thread-utils.c", + "libgit2/src/thread-utils.h", + "libgit2/src/trace.c", + "libgit2/src/trace.h", + "libgit2/src/transaction.c", + "libgit2/src/transport.c", + "libgit2/src/transports/auth.c", + "libgit2/src/transports/auth.h", + 
"libgit2/src/transports/cred_helpers.c", + "libgit2/src/transports/cred.c", + "libgit2/src/transports/git.c", + "libgit2/src/transports/http.c", + "libgit2/src/transports/local.c", + "libgit2/src/transports/smart_pkt.c", + "libgit2/src/transports/smart_protocol.c", + "libgit2/src/transports/smart.c", + "libgit2/src/transports/smart.h", + "libgit2/src/transports/ssh.c", + "libgit2/src/tree-cache.c", + "libgit2/src/tree-cache.h", + "libgit2/src/tree.c", + "libgit2/src/tree.h", + "libgit2/src/tsort.c", + "libgit2/src/userdiff.h", + "libgit2/src/util.c", + "libgit2/src/util.h", + "libgit2/src/vector.c", + "libgit2/src/vector.h", + "libgit2/src/xdiff/xdiff.h", + "libgit2/src/xdiff/xdiffi.c", + "libgit2/src/xdiff/xdiffi.h", + "libgit2/src/xdiff/xemit.c", + "libgit2/src/xdiff/xemit.h", + "libgit2/src/xdiff/xhistogram.c", + "libgit2/src/xdiff/xinclude.h", + "libgit2/src/xdiff/xmacros.h", + "libgit2/src/xdiff/xmerge.c", + "libgit2/src/xdiff/xpatience.c", + "libgit2/src/xdiff/xprepare.c", + "libgit2/src/xdiff/xprepare.h", + "libgit2/src/xdiff/xtypes.h", + "libgit2/src/xdiff/xutils.c", + "libgit2/src/xdiff/xutils.h", + "libgit2/src/zstream.c", + "libgit2/src/zstream.h" + ], + "conditions": [ + ["OS=='mac'", { + "defines": [ + "GIT_SECURE_TRANSPORT", + "GIT_USE_STAT_MTIMESPEC" + ], + "sources": [ + "libgit2/src/stransport_stream.c", + "libgit2/src/stransport_stream.h", + "libgit2/src/tls_stream.c", + "libgit2/src/tls_stream.h" + ], + "link_settings": { + "xcode_settings": { + "OTHER_LDFLAGS": [ + "-framework Security", + "-framework CoreFoundation" + ], + } + } + }], + ["OS=='linux'", { + "cflags": [ + "-DGIT_SSH", + "-DGIT_SSL", + "-w", + ], + "defines": [ + "GIT_OPENSSL", + "GIT_USE_STAT_MTIM" + ], + "sources": [ + "libgit2/src/tls_stream.c", + "libgit2/src/tls_stream.h" + ] + }], + ["OS=='win'", { + "defines": [ + "GIT_WINHTTP", + ], + "msvs_settings": { + "VCLinkerTool": { + "AdditionalDependencies": [ + "ws2_32.lib" + ], + }, + # Workaround of a strange bug: + # 
TargetMachine + static_library + x64 = nothing. + "conditions": [ + ["target_arch=='x64'", { + "VCLibrarianTool": { + "AdditionalOptions": [ + "/MACHINE:X64", + ], + }, + }, { + "VCLibrarianTool": { + "AdditionalOptions": [ + "/MACHINE:x86", + ], + }, + }], + ], + }, + "msvs_disabled_warnings": [ + # Conversion from 'ssize_t' to 'int32_t', possible loss of data. + 4244, + # Conversion from 'size_t' to 'int', possible loss of data. + 4267, + # Different 'volatile' qualifiers. + 4090, + # 'volatile void *' differs in levels of indirection from 'int'. + 4047, + # 'InterlockedDecrement' undefined; assuming extern returning int. + 4013, + ], + "include_dirs": [ + "libgit2/deps/regex" + ], + "sources": [ + "libgit2/deps/regex/regex.c", + "libgit2/src/transports/winhttp.c", + "libgit2/src/win32/dir.c", + "libgit2/src/win32/dir.h", + "libgit2/src/win32/error.c", + "libgit2/src/win32/error.h", + "libgit2/src/win32/findfile.c", + "libgit2/src/win32/findfile.h", + "libgit2/src/win32/git2.rc", + "libgit2/src/win32/map.c", + "libgit2/src/win32/mingw-compat.h", + "libgit2/src/win32/msvc-compat.h", + "libgit2/src/win32/path_w32.c", + "libgit2/src/win32/path_w32.h", + "libgit2/src/win32/posix_w32.c", + "libgit2/src/win32/posix.h", + "libgit2/src/win32/precompiled.c", + "libgit2/src/win32/precompiled.h", + "libgit2/src/win32/pthread.c", + "libgit2/src/win32/pthread.h", + "libgit2/src/win32/reparse.h", + "libgit2/src/win32/utf-conv.c", + "libgit2/src/win32/utf-conv.h", + "libgit2/src/win32/version.h", + "libgit2/src/win32/w32_buffer.c", + "libgit2/src/win32/w32_buffer.h", + "libgit2/src/win32/w32_util.c", + "libgit2/src/win32/w32_util.h", + ], + }, { + "libraries": [ + "-lpthread", + ], + "sources": [ + "libgit2/src/unix/map.c", + "libgit2/src/unix/posix.h", + "libgit2/src/unix/realpath.c", + ], + "cflags": [ + "-Wno-missing-field-initializers", + "-Wno-unused-variable", + "-Wno-deprecated-declarations", + ], + "xcode_settings": { + "WARNING_CFLAGS": [ + 
"-Wno-missing-field-initializers", + "-Wno-unused-variable", + "-Wno-deprecated-declarations", + "-Wno-uninitialized", + ], + }, + }, + ] + ], + "include_dirs": [ + "libgit2/include", + "libgit2/src" + ], + "direct_dependent_settings": { + "include_dirs": [ + "libgit2/include", + ], + }, + }, + { + "target_name": "zlib", + "type": "static_library", + "sources": [ + "libgit2/deps/zlib/adler32.c", + "libgit2/deps/zlib/crc32.c", + "libgit2/deps/zlib/crc32.h", + "libgit2/deps/zlib/deflate.c", + "libgit2/deps/zlib/deflate.h", + "libgit2/deps/zlib/inffast.c", + "libgit2/deps/zlib/inffast.h", + "libgit2/deps/zlib/inffixed.h", + "libgit2/deps/zlib/inflate.c", + "libgit2/deps/zlib/inflate.h", + "libgit2/deps/zlib/inftrees.c", + "libgit2/deps/zlib/inftrees.h", + "libgit2/deps/zlib/trees.c", + "libgit2/deps/zlib/trees.h", + "libgit2/deps/zlib/zconf.h", + "libgit2/deps/zlib/zlib.h", + "libgit2/deps/zlib/zutil.c", + "libgit2/deps/zlib/zutil.h", + ], + "defines": [ + "NO_VIZ", + "STDC", + "NO_GZIP", + ], + "conditions": [ + ["OS=='win'", { + "include_dirs": [ + "libgit2/deps/regex" + ] + }] + ], + "include_dirs": [ + "libgit2/include" + ], + "direct_dependent_settings": { + "include_dirs": [ + "libgit2/deps/zlib", + ], + }, + }, + { + "target_name": "libssh2", + "type": "static_library", + "defines": [ + "NETSNMP_ENABLE_IPV6" + ], + "sources": [ + "libssh2/src/agent.c", + "libssh2/src/crypt.c", + "libssh2/src/keepalive.c", + "libssh2/src/libgcrypt.c", + "libssh2/src/openssl.c", + "libssh2/src/publickey.c", + "libssh2/src/sftp.c", + "libssh2/src/version.c", + "libssh2/src/channel.c", + "libssh2/src/global.c", + "libssh2/src/kex.c", + "libssh2/src/mac.c", + "libssh2/src/packet.c", + "libssh2/src/scp.c", + "libssh2/src/transport.c", + "libssh2/src/comp.c", + "libssh2/src/hostkey.c", + "libssh2/src/knownhost.c", + "libssh2/src/misc.c", + "libssh2/src/pem.c", + "libssh2/src/session.c", + "libssh2/src/userauth.c", + ], + "include_dirs": [ + ".", + "libssh2/include", + ], + 
"dependencies": [ + "openssl/openssl.gyp:openssl" + ], + "direct_dependent_settings": { + "include_dirs": [ + "libssh2/include" + ] + }, + "conditions": [ + ["OS=='win'", { + "include_dirs": [ + "libssh2/src", + "libssh2/win32", + "libssh2/include" + ], + "defines!": [ + "HAVE_POLL" + ], + "direct_dependent_settings": { + "include_dirs": [ + "libssh2/src", + "libssh2/win32", + "libssh2/include" + ] + } + }], + ] + } + ] +} diff --git a/vendor/libssh2/.gitignore b/vendor/libssh2/.gitignore new file mode 100755 index 000000000..2ef22d090 --- /dev/null +++ b/vendor/libssh2/.gitignore @@ -0,0 +1,32 @@ +.deps +.libs +*.lib +*.pdb +*.dll +*.exe +*.obj +.*.swp +Debug +Release +*.exp +Makefile +aclocal.m4 +autom4te.cache +config.log +config.status +depcomp +libtool +ltmain.sh +ssh2_sample +libssh2-*.tar.gz +INSTALL +*.o +*.lo +*.la +mkinstalldirs +tags +libssh2.pc +TAGS +libssh2_config.h +!win32/libssh2_config.h +stamp-h* diff --git a/vendor/libssh2/CMakeLists.txt b/vendor/libssh2/CMakeLists.txt new file mode 100644 index 000000000..dc585cd4d --- /dev/null +++ b/vendor/libssh2/CMakeLists.txt @@ -0,0 +1,101 @@ +# Copyright (c) 2014, 2015 Alexander Lamaison +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided +# that the following conditions are met: +# +# Redistributions of source code must retain the above +# copyright notice, this list of conditions and the +# following disclaimer. +# +# Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials +# provided with the distribution. +# +# Neither the name of the copyright holder nor the names +# of any other contributors may be used to endorse or +# promote products derived from this software without +# specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY +# OF SUCH DAMAGE. + +cmake_minimum_required(VERSION 2.8.11) + +set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake) + +project(libssh2 C) +set(PROJECT_URL "https://www.libssh2.org/") +set(PROJECT_DESCRIPTION "The SSH library") + +option(BUILD_SHARED_LIBS "Build Shared Libraries" OFF) + +# Parse version + +file(READ ${CMAKE_CURRENT_SOURCE_DIR}/include/libssh2.h _HEADER_CONTENTS) +string( + REGEX REPLACE ".*#define LIBSSH2_VERSION[ \t]+\"([^\"]+)\".*" "\\1" + LIBSSH2_VERSION "${_HEADER_CONTENTS}") +string( + REGEX REPLACE ".*#define LIBSSH2_VERSION_MAJOR[ \t]+([0-9]+).*" "\\1" + LIBSSH2_VERSION_MAJOR "${_HEADER_CONTENTS}") +string( + REGEX REPLACE ".*#define LIBSSH2_VERSION_MINOR[ \t]+([0-9]+).*" "\\1" + LIBSSH2_VERSION_MINOR "${_HEADER_CONTENTS}") +string( + REGEX REPLACE ".*#define LIBSSH2_VERSION_PATCH[ \t]+([0-9]+).*" "\\1" + LIBSSH2_VERSION_PATCH "${_HEADER_CONTENTS}") + +if(NOT LIBSSH2_VERSION OR + NOT LIBSSH2_VERSION_MAJOR MATCHES "^[0-9]+$" OR + NOT LIBSSH2_VERSION_MINOR MATCHES "^[0-9]+$" OR + NOT LIBSSH2_VERSION_PATCH MATCHES "^[0-9]+$") + message( + FATAL_ERROR + "Unable to parse version from" + "${CMAKE_CURRENT_SOURCE_DIR}/include/libssh2.h") +endif() + +include(GNUInstallDirs) 
+install( + FILES docs/AUTHORS COPYING docs/HACKING README RELEASE-NOTES NEWS + DESTINATION ${CMAKE_INSTALL_DOCDIR}) + +include(max_warnings) +include(FeatureSummary) + +add_subdirectory(src) + +option(BUILD_EXAMPLES "Build libssh2 examples" ON) +if(BUILD_EXAMPLES) + add_subdirectory(example) +endif() + +option(BUILD_TESTING "Build libssh2 test suite" ON) +if(BUILD_TESTING) + enable_testing() + add_subdirectory(tests) +endif() + +add_subdirectory(docs) + +feature_summary(WHAT ALL) + +set(CPACK_PACKAGE_VERSION_MAJOR ${LIBSSH2_VERSION_MAJOR}) +set(CPACK_PACKAGE_VERSION_MINOR ${LIBSSH2_VERSION_MINOR}) +set(CPACK_PACKAGE_VERSION_PATCH ${LIBSSH2_VERSION_PATCH}) +set(CPACK_PACKAGE_VERSION ${LIBSSH2_VERSION}) +include(CPack) diff --git a/vendor/libssh2/COPYING b/vendor/libssh2/COPYING new file mode 100644 index 000000000..f2ca94772 --- /dev/null +++ b/vendor/libssh2/COPYING @@ -0,0 +1,42 @@ +/* Copyright (c) 2004-2007 Sara Golemon + * Copyright (c) 2005,2006 Mikhail Gusarov + * Copyright (c) 2006-2007 The Written Word, Inc. + * Copyright (c) 2007 Eli Fant + * Copyright (c) 2009-2014 Daniel Stenberg + * Copyright (C) 2008, 2009 Simon Josefsson + * All rights reserved. + * + * Redistribution and use in source and binary forms, + * with or without modification, are permitted provided + * that the following conditions are met: + * + * Redistributions of source code must retain the above + * copyright notice, this list of conditions and the + * following disclaimer. + * + * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * Neither the name of the copyright holder nor the names + * of any other contributors may be used to endorse or + * promote products derived from this software without + * specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY + * OF SUCH DAMAGE. + */ + diff --git a/vendor/libssh2/ChangeLog b/vendor/libssh2/ChangeLog new file mode 100644 index 000000000..404c887c2 --- /dev/null +++ b/vendor/libssh2/ChangeLog @@ -0,0 +1 @@ +see NEWS diff --git a/vendor/libssh2/Makefile.OpenSSL.inc b/vendor/libssh2/Makefile.OpenSSL.inc new file mode 100644 index 000000000..76f3e85ca --- /dev/null +++ b/vendor/libssh2/Makefile.OpenSSL.inc @@ -0,0 +1,2 @@ +CRYPTO_CSOURCES = openssl.c +CRYPTO_HHEADERS = openssl.h diff --git a/vendor/libssh2/Makefile.WinCNG.inc b/vendor/libssh2/Makefile.WinCNG.inc new file mode 100644 index 000000000..c18350eed --- /dev/null +++ b/vendor/libssh2/Makefile.WinCNG.inc @@ -0,0 +1,2 @@ +CRYPTO_CSOURCES = wincng.c +CRYPTO_HHEADERS = wincng.h diff --git a/vendor/libssh2/Makefile.am b/vendor/libssh2/Makefile.am new file mode 100644 index 000000000..761733e73 --- /dev/null +++ b/vendor/libssh2/Makefile.am @@ -0,0 +1,149 @@ +AUTOMAKE_OPTIONS = foreign nostdinc + +SUBDIRS = src tests docs +if BUILD_EXAMPLES +SUBDIRS += example +endif + +pkgconfigdir = $(libdir)/pkgconfig +pkgconfig_DATA = libssh2.pc + +include_HEADERS = \ + include/libssh2.h \ + include/libssh2_publickey.h \ + include/libssh2_sftp.h + 
+NETWAREFILES = nw/keepscreen.c \ + nw/nwlib.c \ + nw/GNUmakefile \ + nw/test/GNUmakefile + +DSP = win32/libssh2.dsp +VCPROJ = win32/libssh2.vcproj + +DISTCLEANFILES = $(DSP) + +VMSFILES = vms/libssh2_make_example.dcl vms/libssh2_make_help.dcl \ +vms/libssh2_make_kit.dcl vms/libssh2_make_lib.dcl vms/man2help.c \ +vms/readme.vms vms/libssh2_config.h + +WIN32FILES = win32/GNUmakefile win32/test/GNUmakefile \ +win32/libssh2_config.h win32/config.mk win32/rules.mk \ +win32/Makefile.Watcom win32/libssh2.dsw win32/tests.dsp $(DSP) \ +win32/msvcproj.head win32/msvcproj.foot win32/libssh2.rc + +OS400FILES = os400/README400 os400/initscript.sh os400/make.sh \ +os400/make-src.sh os400/make-rpg.sh os400/make-include.sh \ +os400/os400sys.c os400/ccsid.c \ +os400/libssh2_config.h os400/macros.h os400/libssh2_ccsid.h \ +os400/include/alloca.h os400/include/sys/socket.h os400/include/stdio.h \ +os400/libssh2rpg/libssh2.rpgle.in \ +os400/libssh2rpg/libssh2_ccsid.rpgle.in \ +os400/libssh2rpg/libssh2_publickey.rpgle \ +os400/libssh2rpg/libssh2_sftp.rpgle \ +Makefile.os400qc3.inc + +EXTRA_DIST = $(WIN32FILES) buildconf $(NETWAREFILES) get_ver.awk \ + maketgz NMakefile RELEASE-NOTES libssh2.pc.in $(VMSFILES) config.rpath \ + CMakeLists.txt cmake $(OS400FILES) + +ACLOCAL_AMFLAGS = -I m4 + +.PHONY: ChangeLog +ChangeLog: + echo "see NEWS" > ./ChangeLog +DISTCLEANFILES += ChangeLog + +dist-hook: + rm -rf $(top_builddir)/tests/log + find $(distdir) -name "*.dist" -exec rm {} \; + (distit=`find $(srcdir) -name "*.dist"`; \ + for file in $$distit; do \ + strip=`echo $$file | sed -e s/^$(srcdir)// -e s/\.dist//`; \ + cp $$file $(distdir)$$strip; \ + done) + +# Code Coverage + +init-coverage: + make clean + lcov --directory . --zerocounters + +COVERAGE_CCOPTS ?= "-g --coverage" +COVERAGE_OUT ?= docs/coverage + +build-coverage: + make CFLAGS=$(COVERAGE_CCOPTS) check + mkdir -p $(COVERAGE_OUT) + lcov --directory . 
--output-file $(COVERAGE_OUT)/$(PACKAGE).info \ + --capture + +gen-coverage: + genhtml --output-directory $(COVERAGE_OUT) \ + $(COVERAGE_OUT)/$(PACKAGE).info \ + --highlight --frames --legend \ + --title "$(PACKAGE_NAME)" + +coverage: init-coverage build-coverage gen-coverage + +# DSP/VCPROJ generation adapted from libcurl +# only OpenSSL and WinCNG are supported with this build system +CRYPTO_CSOURCES = openssl.c wincng.c +CRYPTO_HHEADERS = openssl.h wincng.h +# Makefile.inc provides the CSOURCES and HHEADERS defines +include Makefile.inc + +WIN32SOURCES = $(CSOURCES) +WIN32HEADERS = $(HHEADERS) libssh2_config.h + +$(DSP): win32/msvcproj.head win32/msvcproj.foot Makefile.am + echo "creating $(DSP)" + @( (cat $(srcdir)/win32/msvcproj.head; \ + echo "# Begin Group \"Source Files\""; \ + echo ""; \ + echo "# PROP Default_Filter \"cpp;c;cxx\""; \ + win32_srcs='$(WIN32SOURCES)'; \ + sorted_srcs=`for file in $$win32_srcs; do echo $$file; done | sort`; \ + for file in $$sorted_srcs; do \ + echo "# Begin Source File"; \ + echo ""; \ + echo "SOURCE=..\\src\\"$$file; \ + echo "# End Source File"; \ + done; \ + echo "# End Group"; \ + echo "# Begin Group \"Header Files\""; \ + echo ""; \ + echo "# PROP Default_Filter \"h;hpp;hxx\""; \ + win32_hdrs='$(WIN32HEADERS)'; \ + sorted_hdrs=`for file in $$win32_hdrs; do echo $$file; done | sort`; \ + for file in $$sorted_hdrs; do \ + echo "# Begin Source File"; \ + echo ""; \ + if [ "$$file" == "libssh2_config.h" ]; \ + then \ + echo "SOURCE=.\\"$$file; \ + else \ + echo "SOURCE=..\\src\\"$$file; \ + fi; \ + echo "# End Source File"; \ + done; \ + echo "# End Group"; \ + cat $(srcdir)/win32/msvcproj.foot) | \ + awk '{printf("%s\r\n", gensub("\r", "", "g"))}' > $@ ) + +$(VCPROJ): win32/vc8proj.head win32/vc8proj.foot Makefile.am + echo "creating $(VCPROJ)" + @( (cat $(srcdir)/vc8proj.head; \ + win32_srcs='$(WIN32SOURCES)'; \ + sorted_srcs=`for file in $$win32_srcs; do echo $$file; done | sort`; \ + for file in $$sorted_srcs; do \ + 
echo ""; \ + done; \ + echo ""; \ + win32_hdrs='$(WIN32HEADERS)'; \ + sorted_hdrs=`for file in $$win32_hdrs; do echo $$file; done | sort`; \ + for file in $$sorted_hdrs; do \ + echo ""; \ + done; \ + cat $(srcdir)/vc8proj.foot) | \ + awk '{printf("%s\r\n", gensub("\r", "", "g"))}' > $@ ) diff --git a/vendor/libssh2/Makefile.in b/vendor/libssh2/Makefile.in new file mode 100644 index 000000000..624dfebd4 --- /dev/null +++ b/vendor/libssh2/Makefile.in @@ -0,0 +1,1053 @@ +# Makefile.in generated by automake 1.15 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994-2014 Free Software Foundation, Inc. + +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +@SET_MAKE@ + + +VPATH = @srcdir@ +am__is_gnu_make = { \ + if test -z '$(MAKELEVEL)'; then \ + false; \ + elif test -n '$(MAKE_HOST)'; then \ + true; \ + elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ + true; \ + else \ + false; \ + fi; \ +} +am__make_running_with_option = \ + case $${target_option-} in \ + ?) 
;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +@BUILD_EXAMPLES_TRUE@am__append_1 = example +subdir = . 
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/m4/autobuild.m4 \ + $(top_srcdir)/m4/lib-ld.m4 $(top_srcdir)/m4/lib-link.m4 \ + $(top_srcdir)/m4/lib-prefix.m4 $(top_srcdir)/m4/libtool.m4 \ + $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ + $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ + $(top_srcdir)/acinclude.m4 $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +DIST_COMMON = $(srcdir)/Makefile.am $(top_srcdir)/configure \ + $(am__configure_deps) $(include_HEADERS) $(am__DIST_COMMON) +am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ + configure.lineno config.status.lineno +mkinstalldirs = $(install_sh) -d +CONFIG_HEADER = $(top_builddir)/src/libssh2_config.h \ + $(top_builddir)/example/libssh2_config.h +CONFIG_CLEAN_FILES = libssh2.pc +CONFIG_CLEAN_VPATH_FILES = +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = +SOURCES = +DIST_SOURCES = +RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ + ctags-recursive dvi-recursive html-recursive info-recursive \ + install-data-recursive install-dvi-recursive \ + install-exec-recursive install-html-recursive \ + install-info-recursive install-pdf-recursive \ + install-ps-recursive install-recursive installcheck-recursive \ + installdirs-recursive pdf-recursive ps-recursive \ + tags-recursive uninstall-recursive +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; +am__vpath_adj = case $$p in \ + $(srcdir)/*) f=`echo "$$p" | sed 
"s|^$$srcdirstrip/||"`;; \ + *) f=$$p;; \ + esac; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } +am__installdirs = "$(DESTDIR)$(pkgconfigdir)" \ + "$(DESTDIR)$(includedir)" +DATA = $(pkgconfig_DATA) +HEADERS = $(include_HEADERS) +RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ + distclean-recursive maintainer-clean-recursive +am__recursive_targets = \ + $(RECURSIVE_TARGETS) \ + $(RECURSIVE_CLEAN_TARGETS) \ + $(am__extra_recursive_targets) +AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ + cscope distdir dist dist-all distcheck +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +# Read a list of newline-separated strings from the standard input, +# and print each of them once, without duplicates. Input order is +# *not* preserved. +am__uniquify_input = $(AWK) '\ + BEGIN { nonempty = 0; } \ + { items[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in items) print i; }; } \ +' +# Make sure the list of sources is unique. 
This is necessary because, +# e.g., the same source file might be shared among _SOURCES variables +# for different programs/libraries. +am__define_uniq_tagged_files = \ + list='$(am__tagged_files)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | $(am__uniquify_input)` +ETAGS = etags +CTAGS = ctags +CSCOPE = cscope +DIST_SUBDIRS = src tests docs example +am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.inc \ + $(srcdir)/libssh2.pc.in COPYING ChangeLog NEWS README compile \ + config.guess config.rpath config.sub depcomp install-sh \ + ltmain.sh missing +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +distdir = $(PACKAGE)-$(VERSION) +top_distdir = $(distdir) +am__remove_distdir = \ + if test -d "$(distdir)"; then \ + find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \ + && rm -rf "$(distdir)" \ + || { sleep 5 && rm -rf "$(distdir)"; }; \ + else :; fi +am__post_remove_distdir = $(am__remove_distdir) +am__relativize = \ + dir0=`pwd`; \ + sed_first='s,^\([^/]*\)/.*$$,\1,'; \ + sed_rest='s,^[^/]*/*,,'; \ + sed_last='s,^.*/\([^/]*\)$$,\1,'; \ + sed_butlast='s,/*[^/]*$$,,'; \ + while test -n "$$dir1"; do \ + first=`echo "$$dir1" | sed -e "$$sed_first"`; \ + if test "$$first" != "."; then \ + if test "$$first" = ".."; then \ + dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ + dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ + else \ + first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ + if test "$$first2" = "$$first"; then \ + dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ + else \ + dir2="../$$dir2"; \ + fi; \ + dir0="$$dir0"/"$$first"; \ + fi; \ + fi; \ + dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ + done; \ + reldir="$$dir2" +DIST_ARCHIVES = $(distdir).tar.gz +GZIP_ENV = --best +DIST_TARGETS = dist-gzip +distuninstallcheck_listfiles = find . 
-type f -print +am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \ + | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$' +distcleancheck_listfiles = find . -type f -print +ACLOCAL = @ACLOCAL@ +ALLOCA = @ALLOCA@ +AMTAR = @AMTAR@ +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ +AR = @AR@ +AS = @AS@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CFLAGS = @CFLAGS@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +DLLTOOL = @DLLTOOL@ +DSYMUTIL = @DSYMUTIL@ +DUMPBIN = @DUMPBIN@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +FGREP = @FGREP@ +GREP = @GREP@ +HAVE_LIBBCRYPT = @HAVE_LIBBCRYPT@ +HAVE_LIBCRYPT32 = @HAVE_LIBCRYPT32@ +HAVE_LIBGCRYPT = @HAVE_LIBGCRYPT@ +HAVE_LIBSSL = @HAVE_LIBSSL@ +HAVE_LIBZ = @HAVE_LIBZ@ +INSTALL = @INSTALL@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LD = @LD@ +LDFLAGS = @LDFLAGS@ +LIBBCRYPT = @LIBBCRYPT@ +LIBBCRYPT_PREFIX = @LIBBCRYPT_PREFIX@ +LIBCRYPT32 = @LIBCRYPT32@ +LIBCRYPT32_PREFIX = @LIBCRYPT32_PREFIX@ +LIBGCRYPT = @LIBGCRYPT@ +LIBGCRYPT_PREFIX = @LIBGCRYPT_PREFIX@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LIBSREQUIRED = @LIBSREQUIRED@ +LIBSSH2VER = @LIBSSH2VER@ +LIBSSL = @LIBSSL@ +LIBSSL_PREFIX = @LIBSSL_PREFIX@ +LIBTOOL = @LIBTOOL@ +LIBZ = @LIBZ@ +LIBZ_PREFIX = @LIBZ_PREFIX@ +LIPO = @LIPO@ +LN_S = @LN_S@ +LTLIBBCRYPT = @LTLIBBCRYPT@ +LTLIBCRYPT32 = @LTLIBCRYPT32@ +LTLIBGCRYPT = @LTLIBGCRYPT@ +LTLIBOBJS = @LTLIBOBJS@ +LTLIBSSL = @LTLIBSSL@ +LTLIBZ = @LTLIBZ@ +MAINT = @MAINT@ +MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ +MKDIR_P = @MKDIR_P@ +NM = @NM@ +NMEDIT = @NMEDIT@ +OBJDUMP = @OBJDUMP@ +OBJEXT = @OBJEXT@ +OTOOL = @OTOOL@ +OTOOL64 = @OTOOL64@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ 
+PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +RANLIB = @RANLIB@ +SED = @SED@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +SSHD = @SSHD@ +STRIP = @STRIP@ +VERSION = @VERSION@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ +docdir = @docdir@ +dvidir = @dvidir@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = @htmldir@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localedir = @localedir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +runstatedir = @runstatedir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ +target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ +AUTOMAKE_OPTIONS = foreign nostdinc +SUBDIRS = src tests docs $(am__append_1) +pkgconfigdir = $(libdir)/pkgconfig +pkgconfig_DATA = libssh2.pc +include_HEADERS = \ + include/libssh2.h \ + include/libssh2_publickey.h \ + include/libssh2_sftp.h + +NETWAREFILES = nw/keepscreen.c \ + nw/nwlib.c \ + nw/GNUmakefile \ + 
nw/test/GNUmakefile + +DSP = win32/libssh2.dsp +VCPROJ = win32/libssh2.vcproj +DISTCLEANFILES = $(DSP) ChangeLog +VMSFILES = vms/libssh2_make_example.dcl vms/libssh2_make_help.dcl \ +vms/libssh2_make_kit.dcl vms/libssh2_make_lib.dcl vms/man2help.c \ +vms/readme.vms vms/libssh2_config.h + +WIN32FILES = win32/GNUmakefile win32/test/GNUmakefile \ +win32/libssh2_config.h win32/config.mk win32/rules.mk \ +win32/Makefile.Watcom win32/libssh2.dsw win32/tests.dsp $(DSP) \ +win32/msvcproj.head win32/msvcproj.foot win32/libssh2.rc + +OS400FILES = os400/README400 os400/initscript.sh os400/make.sh \ +os400/make-src.sh os400/make-rpg.sh os400/make-include.sh \ +os400/os400sys.c os400/ccsid.c \ +os400/libssh2_config.h os400/macros.h os400/libssh2_ccsid.h \ +os400/include/alloca.h os400/include/sys/socket.h os400/include/stdio.h \ +os400/libssh2rpg/libssh2.rpgle.in \ +os400/libssh2rpg/libssh2_ccsid.rpgle.in \ +os400/libssh2rpg/libssh2_publickey.rpgle \ +os400/libssh2rpg/libssh2_sftp.rpgle \ +Makefile.os400qc3.inc + +EXTRA_DIST = $(WIN32FILES) buildconf $(NETWAREFILES) get_ver.awk \ + maketgz NMakefile RELEASE-NOTES libssh2.pc.in $(VMSFILES) config.rpath \ + CMakeLists.txt cmake $(OS400FILES) + +ACLOCAL_AMFLAGS = -I m4 + +# DSP/VCPROJ generation adapted from libcurl +# only OpenSSL and WinCNG are supported with this build system +CRYPTO_CSOURCES = openssl.c wincng.c +CRYPTO_HHEADERS = openssl.h wincng.h +CSOURCES = channel.c comp.c crypt.c hostkey.c kex.c mac.c misc.c \ + packet.c publickey.c scp.c session.c sftp.c userauth.c transport.c \ + version.c knownhost.c agent.c $(CRYPTO_CSOURCES) pem.c keepalive.c global.c + +HHEADERS = libssh2_priv.h $(CRYPTO_HHEADERS) transport.h channel.h comp.h \ + mac.h misc.h packet.h userauth.h session.h sftp.h crypto.h + +# Makefile.inc provides the CSOURCES and HHEADERS defines +WIN32SOURCES = $(CSOURCES) +WIN32HEADERS = $(HHEADERS) libssh2_config.h +all: all-recursive + +.SUFFIXES: +am--refresh: Makefile + @: +$(srcdir)/Makefile.in: 
@MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(srcdir)/Makefile.inc $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \ + $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \ + && exit 0; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' in \ + *config.status*) \ + echo ' $(SHELL) ./config.status'; \ + $(SHELL) ./config.status;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ + esac; +$(srcdir)/Makefile.inc $(am__empty): + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + $(SHELL) ./config.status --recheck + +$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) + $(am__cd) $(srcdir) && $(AUTOCONF) +$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) + $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) +$(am__aclocal_m4_deps): +libssh2.pc: $(top_builddir)/config.status $(srcdir)/libssh2.pc.in + cd $(top_builddir) && $(SHELL) ./config.status $@ + +mostlyclean-libtool: + -rm -f *.lo + +clean-libtool: + -rm -rf .libs _libs + +distclean-libtool: + -rm -f libtool config.lt +install-pkgconfigDATA: $(pkgconfig_DATA) + @$(NORMAL_INSTALL) + @list='$(pkgconfig_DATA)'; test -n "$(pkgconfigdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(pkgconfigdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(pkgconfigdir)" || exit 1; \ + fi; \ + for p in $$list; do \ + if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pkgconfigdir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(pkgconfigdir)" || 
exit $$?; \ + done + +uninstall-pkgconfigDATA: + @$(NORMAL_UNINSTALL) + @list='$(pkgconfig_DATA)'; test -n "$(pkgconfigdir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(pkgconfigdir)'; $(am__uninstall_files_from_dir) +install-includeHEADERS: $(include_HEADERS) + @$(NORMAL_INSTALL) + @list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \ + fi; \ + for p in $$list; do \ + if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \ + $(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \ + done + +uninstall-includeHEADERS: + @$(NORMAL_UNINSTALL) + @list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir) + +# This directory's subdirectories are mostly independent; you can cd +# into them and run 'make' without going through this Makefile. +# To change the values of 'make' variables: instead of editing Makefiles, +# (1) if the variable is set in 'config.status', edit 'config.status' +# (which will cause the Makefiles to be regenerated when you run 'make'); +# (2) otherwise, pass the desired values on the 'make' command line. 
+$(am__recursive_targets): + @fail=; \ + if $(am__make_keepgoing); then \ + failcom='fail=yes'; \ + else \ + failcom='exit 1'; \ + fi; \ + dot_seen=no; \ + target=`echo $@ | sed s/-recursive//`; \ + case "$@" in \ + distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ + *) list='$(SUBDIRS)' ;; \ + esac; \ + for subdir in $$list; do \ + echo "Making $$target in $$subdir"; \ + if test "$$subdir" = "."; then \ + dot_seen=yes; \ + local_target="$$target-am"; \ + else \ + local_target="$$target"; \ + fi; \ + ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ + || eval $$failcom; \ + done; \ + if test "$$dot_seen" = "no"; then \ + $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ + fi; test -z "$$fail" + +ID: $(am__tagged_files) + $(am__define_uniq_tagged_files); mkid -fID $$unique +tags: tags-recursive +TAGS: tags + +tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + set x; \ + here=`pwd`; \ + if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ + include_option=--etags-include; \ + empty_fix=.; \ + else \ + include_option=--include; \ + empty_fix=; \ + fi; \ + list='$(SUBDIRS)'; for subdir in $$list; do \ + if test "$$subdir" = .; then :; else \ + test ! 
-f $$subdir/TAGS || \ + set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ + fi; \ + done; \ + $(am__define_uniq_tagged_files); \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ + test -n "$$unique" || unique=$$empty_fix; \ + if test $$# -gt 0; then \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + "$$@" $$unique; \ + else \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$unique; \ + fi; \ + fi +ctags: ctags-recursive + +CTAGS: ctags +ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + $(am__define_uniq_tagged_files); \ + test -z "$(CTAGS_ARGS)$$unique" \ + || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ + $$unique + +GTAGS: + here=`$(am__cd) $(top_builddir) && pwd` \ + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" +cscope: cscope.files + test ! -s cscope.files \ + || $(CSCOPE) -b -q $(AM_CSCOPEFLAGS) $(CSCOPEFLAGS) -i cscope.files $(CSCOPE_ARGS) +clean-cscope: + -rm -f cscope.files +cscope.files: clean-cscope cscopelist +cscopelist: cscopelist-recursive + +cscopelist-am: $(am__tagged_files) + list='$(am__tagged_files)'; \ + case "$(srcdir)" in \ + [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ + *) sdir=$(subdir)/$(srcdir) ;; \ + esac; \ + for i in $$list; do \ + if test -f "$$i"; then \ + echo "$(subdir)/$$i"; \ + else \ + echo "$$sdir/$$i"; \ + fi; \ + done >> $(top_builddir)/cscope.files + +distclean-tags: + -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags + -rm -f cscope.out cscope.in.out cscope.po.out cscope.files + +distdir: $(DISTFILES) + $(am__remove_distdir) + test -d "$(distdir)" || mkdir "$(distdir)" + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed 
'/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done + @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ + if test "$$subdir" = .; then :; else \ + $(am__make_dryrun) \ + || test -d "$(distdir)/$$subdir" \ + || $(MKDIR_P) "$(distdir)/$$subdir" \ + || exit 1; \ + dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ + $(am__relativize); \ + new_distdir=$$reldir; \ + dir1=$$subdir; dir2="$(top_distdir)"; \ + $(am__relativize); \ + new_top_distdir=$$reldir; \ + echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ + echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ + ($(am__cd) $$subdir && \ + $(MAKE) $(AM_MAKEFLAGS) \ + top_distdir="$$new_top_distdir" \ + distdir="$$new_distdir" \ + am__remove_distdir=: \ + am__skip_length_check=: \ + am__skip_mode_fix=: \ + distdir) \ + || exit 1; \ + fi; \ + done + $(MAKE) $(AM_MAKEFLAGS) \ + top_distdir="$(top_distdir)" distdir="$(distdir)" \ + dist-hook + -test -n "$(am__skip_mode_fix)" \ + || find "$(distdir)" -type d ! -perm -755 \ + -exec chmod u+rwx,go+rx {} \; -o \ + ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ + ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ + ! -type d ! 
-perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ + || chmod -R a+r "$(distdir)" +dist-gzip: distdir + tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz + $(am__post_remove_distdir) + +dist-bzip2: distdir + tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2 + $(am__post_remove_distdir) + +dist-lzip: distdir + tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz + $(am__post_remove_distdir) + +dist-xz: distdir + tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz + $(am__post_remove_distdir) + +dist-tarZ: distdir + @echo WARNING: "Support for distribution archives compressed with" \ + "legacy program 'compress' is deprecated." >&2 + @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 + tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z + $(am__post_remove_distdir) + +dist-shar: distdir + @echo WARNING: "Support for shar distribution archives is" \ + "deprecated." >&2 + @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 + shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz + $(am__post_remove_distdir) + +dist-zip: distdir + -rm -f $(distdir).zip + zip -rq $(distdir).zip $(distdir) + $(am__post_remove_distdir) + +dist dist-all: + $(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:' + $(am__post_remove_distdir) + +# This target untars the dist file and tries a VPATH configuration. Then +# it guarantees that the distribution is self-contained by making another +# tarfile. 
+distcheck: dist + case '$(DIST_ARCHIVES)' in \ + *.tar.gz*) \ + GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\ + *.tar.bz2*) \ + bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ + *.tar.lz*) \ + lzip -dc $(distdir).tar.lz | $(am__untar) ;;\ + *.tar.xz*) \ + xz -dc $(distdir).tar.xz | $(am__untar) ;;\ + *.tar.Z*) \ + uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ + *.shar.gz*) \ + GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\ + *.zip*) \ + unzip $(distdir).zip ;;\ + esac + chmod -R a-w $(distdir) + chmod u+w $(distdir) + mkdir $(distdir)/_build $(distdir)/_build/sub $(distdir)/_inst + chmod a-w $(distdir) + test -d $(distdir)/_build || exit 0; \ + dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ + && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ + && am__cwd=`pwd` \ + && $(am__cd) $(distdir)/_build/sub \ + && ../../configure \ + $(AM_DISTCHECK_CONFIGURE_FLAGS) \ + $(DISTCHECK_CONFIGURE_FLAGS) \ + --srcdir=../.. --prefix="$$dc_install_base" \ + && $(MAKE) $(AM_MAKEFLAGS) \ + && $(MAKE) $(AM_MAKEFLAGS) dvi \ + && $(MAKE) $(AM_MAKEFLAGS) check \ + && $(MAKE) $(AM_MAKEFLAGS) install \ + && $(MAKE) $(AM_MAKEFLAGS) installcheck \ + && $(MAKE) $(AM_MAKEFLAGS) uninstall \ + && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ + distuninstallcheck \ + && chmod -R a-w "$$dc_install_base" \ + && ({ \ + (cd ../.. 
&& umask 077 && mkdir "$$dc_destdir") \ + && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ + && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ + && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ + distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ + } || { rm -rf "$$dc_destdir"; exit 1; }) \ + && rm -rf "$$dc_destdir" \ + && $(MAKE) $(AM_MAKEFLAGS) dist \ + && rm -rf $(DIST_ARCHIVES) \ + && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ + && cd "$$am__cwd" \ + || exit 1 + $(am__post_remove_distdir) + @(echo "$(distdir) archives ready for distribution: "; \ + list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ + sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' +distuninstallcheck: + @test -n '$(distuninstallcheck_dir)' || { \ + echo 'ERROR: trying to run $@ with an empty' \ + '$$(distuninstallcheck_dir)' >&2; \ + exit 1; \ + }; \ + $(am__cd) '$(distuninstallcheck_dir)' || { \ + echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \ + exit 1; \ + }; \ + test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \ + || { echo "ERROR: files left after uninstall:" ; \ + if test -n "$(DESTDIR)"; then \ + echo " (check DESTDIR support)"; \ + fi ; \ + $(distuninstallcheck_listfiles) ; \ + exit 1; } >&2 +distcleancheck: distclean + @if test '$(srcdir)' = . 
; then \ + echo "ERROR: distcleancheck can only run from a VPATH build" ; \ + exit 1 ; \ + fi + @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ + || { echo "ERROR: files left in build directory after distclean:" ; \ + $(distcleancheck_listfiles) ; \ + exit 1; } >&2 +check-am: all-am +check: check-recursive +all-am: Makefile $(DATA) $(HEADERS) +installdirs: installdirs-recursive +installdirs-am: + for dir in "$(DESTDIR)$(pkgconfigdir)" "$(DESTDIR)$(includedir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-recursive +install-exec: install-exec-recursive +install-data: install-data-recursive +uninstall: uninstall-recursive + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-recursive +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." 
+clean: clean-recursive + +clean-am: clean-generic clean-libtool mostlyclean-am + +distclean: distclean-recursive + -rm -f $(am__CONFIG_DISTCLEAN_FILES) + -rm -f Makefile +distclean-am: clean-am distclean-generic distclean-libtool \ + distclean-tags + +dvi: dvi-recursive + +dvi-am: + +html: html-recursive + +html-am: + +info: info-recursive + +info-am: + +install-data-am: install-includeHEADERS install-pkgconfigDATA + +install-dvi: install-dvi-recursive + +install-dvi-am: + +install-exec-am: + +install-html: install-html-recursive + +install-html-am: + +install-info: install-info-recursive + +install-info-am: + +install-man: + +install-pdf: install-pdf-recursive + +install-pdf-am: + +install-ps: install-ps-recursive + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-recursive + -rm -f $(am__CONFIG_DISTCLEAN_FILES) + -rm -rf $(top_srcdir)/autom4te.cache + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-recursive + +mostlyclean-am: mostlyclean-generic mostlyclean-libtool + +pdf: pdf-recursive + +pdf-am: + +ps: ps-recursive + +ps-am: + +uninstall-am: uninstall-includeHEADERS uninstall-pkgconfigDATA + +.MAKE: $(am__recursive_targets) install-am install-strip + +.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am \ + am--refresh check check-am clean clean-cscope clean-generic \ + clean-libtool cscope cscopelist-am ctags ctags-am dist \ + dist-all dist-bzip2 dist-gzip dist-hook dist-lzip dist-shar \ + dist-tarZ dist-xz dist-zip distcheck distclean \ + distclean-generic distclean-libtool distclean-tags \ + distcleancheck distdir distuninstallcheck dvi dvi-am html \ + html-am info info-am install install-am install-data \ + install-data-am install-dvi install-dvi-am install-exec \ + install-exec-am install-html install-html-am \ + install-includeHEADERS install-info install-info-am \ + install-man install-pdf install-pdf-am install-pkgconfigDATA \ + install-ps install-ps-am 
install-strip installcheck \ + installcheck-am installdirs installdirs-am maintainer-clean \ + maintainer-clean-generic mostlyclean mostlyclean-generic \ + mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ + uninstall-am uninstall-includeHEADERS uninstall-pkgconfigDATA + +.PRECIOUS: Makefile + + +.PHONY: ChangeLog +ChangeLog: + echo "see NEWS" > ./ChangeLog + +dist-hook: + rm -rf $(top_builddir)/tests/log + find $(distdir) -name "*.dist" -exec rm {} \; + (distit=`find $(srcdir) -name "*.dist"`; \ + for file in $$distit; do \ + strip=`echo $$file | sed -e s/^$(srcdir)// -e s/\.dist//`; \ + cp $$file $(distdir)$$strip; \ + done) + +# Code Coverage + +init-coverage: + make clean + lcov --directory . --zerocounters + +COVERAGE_CCOPTS ?= "-g --coverage" +COVERAGE_OUT ?= docs/coverage + +build-coverage: + make CFLAGS=$(COVERAGE_CCOPTS) check + mkdir -p $(COVERAGE_OUT) + lcov --directory . --output-file $(COVERAGE_OUT)/$(PACKAGE).info \ + --capture + +gen-coverage: + genhtml --output-directory $(COVERAGE_OUT) \ + $(COVERAGE_OUT)/$(PACKAGE).info \ + --highlight --frames --legend \ + --title "$(PACKAGE_NAME)" + +coverage: init-coverage build-coverage gen-coverage + +$(DSP): win32/msvcproj.head win32/msvcproj.foot Makefile.am + echo "creating $(DSP)" + @( (cat $(srcdir)/win32/msvcproj.head; \ + echo "# Begin Group \"Source Files\""; \ + echo ""; \ + echo "# PROP Default_Filter \"cpp;c;cxx\""; \ + win32_srcs='$(WIN32SOURCES)'; \ + sorted_srcs=`for file in $$win32_srcs; do echo $$file; done | sort`; \ + for file in $$sorted_srcs; do \ + echo "# Begin Source File"; \ + echo ""; \ + echo "SOURCE=..\\src\\"$$file; \ + echo "# End Source File"; \ + done; \ + echo "# End Group"; \ + echo "# Begin Group \"Header Files\""; \ + echo ""; \ + echo "# PROP Default_Filter \"h;hpp;hxx\""; \ + win32_hdrs='$(WIN32HEADERS)'; \ + sorted_hdrs=`for file in $$win32_hdrs; do echo $$file; done | sort`; \ + for file in $$sorted_hdrs; do \ + echo "# Begin Source File"; \ + echo ""; \ + 
if [ "$$file" == "libssh2_config.h" ]; \ + then \ + echo "SOURCE=.\\"$$file; \ + else \ + echo "SOURCE=..\\src\\"$$file; \ + fi; \ + echo "# End Source File"; \ + done; \ + echo "# End Group"; \ + cat $(srcdir)/win32/msvcproj.foot) | \ + awk '{printf("%s\r\n", gensub("\r", "", "g"))}' > $@ ) + +$(VCPROJ): win32/vc8proj.head win32/vc8proj.foot Makefile.am + echo "creating $(VCPROJ)" + @( (cat $(srcdir)/vc8proj.head; \ + win32_srcs='$(WIN32SOURCES)'; \ + sorted_srcs=`for file in $$win32_srcs; do echo $$file; done | sort`; \ + for file in $$sorted_srcs; do \ + echo ""; \ + done; \ + echo ""; \ + win32_hdrs='$(WIN32HEADERS)'; \ + sorted_hdrs=`for file in $$win32_hdrs; do echo $$file; done | sort`; \ + for file in $$sorted_hdrs; do \ + echo ""; \ + done; \ + cat $(srcdir)/vc8proj.foot) | \ + awk '{printf("%s\r\n", gensub("\r", "", "g"))}' > $@ ) + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. +.NOEXPORT: diff --git a/vendor/libssh2/Makefile.inc b/vendor/libssh2/Makefile.inc new file mode 100644 index 000000000..8f2e570cb --- /dev/null +++ b/vendor/libssh2/Makefile.inc @@ -0,0 +1,6 @@ +CSOURCES = channel.c comp.c crypt.c hostkey.c kex.c mac.c misc.c \ + packet.c publickey.c scp.c session.c sftp.c userauth.c transport.c \ + version.c knownhost.c agent.c $(CRYPTO_CSOURCES) pem.c keepalive.c global.c + +HHEADERS = libssh2_priv.h $(CRYPTO_HHEADERS) transport.h channel.h comp.h \ + mac.h misc.h packet.h userauth.h session.h sftp.h crypto.h diff --git a/vendor/libssh2/Makefile.libgcrypt.inc b/vendor/libssh2/Makefile.libgcrypt.inc new file mode 100644 index 000000000..5d56292ce --- /dev/null +++ b/vendor/libssh2/Makefile.libgcrypt.inc @@ -0,0 +1,2 @@ +CRYPTO_CSOURCES = libgcrypt.c +CRYPTO_HHEADERS = libgcrypt.h diff --git a/vendor/libssh2/Makefile.os400qc3.inc b/vendor/libssh2/Makefile.os400qc3.inc new file mode 100644 index 000000000..e55094d9b --- /dev/null +++ 
b/vendor/libssh2/Makefile.os400qc3.inc @@ -0,0 +1,2 @@ +CRYPTO_CSOURCES = os400qc3.c +CRYPTO_HHEADERS = os400qc3.h diff --git a/vendor/libssh2/NEWS b/vendor/libssh2/NEWS new file mode 100644 index 000000000..52ba0d4ab --- /dev/null +++ b/vendor/libssh2/NEWS @@ -0,0 +1,5430 @@ + Changelog for the libssh2 project. Generated with git2news.pl + +Version 1.7.0 (23 Feb 2016) + +Daniel Stenberg (23 Feb 2016) +- web: the site is now HTTPS + +- RELEASE-NOTES: 1.7.0 release + +- diffie_hellman_sha256: convert bytes to bits + + As otherwise we get far too small numbers. + + Reported-by: Andreas Schneider + + CVE-2016-0787 + +Alexander Lamaison (18 Feb 2016) +- Allow CI failures with VS 2008 x64. + + Appveyor doesn't support this combination. + +Daniel Stenberg (16 Feb 2016) +- [Viktor Szakats brought this change] + + GNUmakefile: list system libs after user libs + + Otherwise some referenced WinSock functions will fail to + resolve when linking against LibreSSL 2.3.x static libraries + with mingw. + + Closes #80 + +- [Viktor Szakats brought this change] + + openssl: apply new HAVE_OPAQUE_STRUCTS macro + + Closes #81 + +- [Viktor Szakats brought this change] + + openssl: fix LibreSSL support after OpenSSL 1.1.0-pre1/2 support + +Alexander Lamaison (14 Feb 2016) +- sftp.h: Fix non-C90 type. + + uint64_t does not exist in C90. Use libssh2_uint64_t instead. + +- Exclude sshd tests from AppVeyor. + + They fail complaining that sshd wasn't invoked with an absolute path. + +- Test on more versions of Visual Studio. + +- Fix Appveyor builds. + +Daniel Stenberg (14 Feb 2016) +- [Viktor Szakats brought this change] + + openssl: add OpenSSL 1.1.0-pre3-dev compatibility + + by using API instead of accessing an internal structure. 
+ + Closes #83 + +- RELEASE-NOTES: synced with 996b04ececdf + +- include/libssh2.h: next version is 1.7.0 + +- configure: build "silent" if possible + +- sftp: re-indented some minor stuff + +- [Jakob Egger brought this change] + + sftp.c: ensure minimum read packet size + + For optimum performance we need to ensure we don't request tiny packets. + +- [Jakob Egger brought this change] + + sftp.c: Explicit return values & sanity checks + +- [Jakob Egger brought this change] + + sftp.c: Check Read Packet File Offset + + This commit adds a simple check to see if the offset of the read + request matches the expected file offset. + + We could try to recover, from this condition at some point in the future. + Right now it is better to return an error instead of corrupted data. + +- [Jakob Egger brought this change] + + sftp.c: Don't return EAGAIN if data was written to buffer + +- [Jakob Egger brought this change] + + sftp.c: Send at least one read request before reading + + This commit ensures that we have sent at least one read request before + we try to read data in sftp_read(). + + Otherwise sftp_read() would return 0 bytes (indicating EOF) if the + socket is not ready for writing. + +- [Jakob Egger brought this change] + + sftp.c: stop reading when buffer is full + + Since we can only store data from a single chunk in filep, + we have to stop receiving data as soon as the buffer is full. + + This adresses the following bug report: + https://github.com/libssh2/libssh2/issues/50 + +Salvador Fandiño (21 Jan 2016) +- agent_disconnect_unix: unset the agent fd after closing it + + "agent_disconnect_unix", called by "libssh2_agent_disconnect", was + leaving the file descriptor in the agent structure unchanged. Later, + "libssh2_agent_free" would call again "libssh2_agent_disconnect" under + the hood and it would try to close again the same file descriptor. 
In + most cases that resulted in just a harmless error, but it is also + possible that the file descriptor had been reused between the two + calls resulting in the closing of an unrelated file descriptor. + + This patch sets agent->fd to LIBSSH2_INVALID_SOCKET avoiding that + issue. + + Signed-off-by: Salvador Fandiño + +Daniel Stenberg (18 Jan 2016) +- [Patrick Monnerat brought this change] + + os400qc3: support encrypted private keys + + PKCS#8 EncryptedPrivateKeyinfo structures are recognized and decoded to get + values accepted by the Qc3 crypto library. + +- [Patrick Monnerat brought this change] + + os400qc3: New PKCS#5 decoder + + The Qc3 library is not able to handle PKCS#8 EncryptedPrivateKeyInfo structures + by itself. It is only capable of decrypting the (encrypted) PrivateKeyInfo + part, providing a key encryption key and an encryption algorithm are given. + Since the encryption key and algorithm description part in a PKCS#8 + EncryptedPrivateKeyInfo is a PKCS#5 structure, such a decoder is needed to + get the derived key method and hash, as well as encryption algorith and + initialisation vector. + +- [Patrick Monnerat brought this change] + + os400qc3: force continuous update on non-final hash/hmac computation + +- [Patrick Monnerat brought this change] + + os400qc3: Be sure hmac keys have a minimum length + + The Qc3 library requires a minimum key length depending on the target + hash algorithm. Append binary zeroes to the given key if not long enough. + This matches RFC 2104 specifications. + +- [Patrick Monnerat brought this change] + + os400qc3: Slave descriptor for key encryption key + + The Qc3 library requires the key encryption key to exist as long as + the encrypted key is used. Its descriptor token is then kept as an + "encrypted key slave" for recursive release. 
+ +- [Patrick Monnerat brought this change] + + os400qc3.c: comment PEM/DER decoding + +- [Patrick Monnerat brought this change] + + os400qc3.c: improve ASN.1 header byte checks + +- [Patrick Monnerat brought this change] + + os400qc3.c: improve OID matching + +- [Patrick Monnerat brought this change] + + os400: os400qc3.c: replace malloc by LIBSSH2_ALLOC or alloca where possible + +- [Patrick Monnerat brought this change] + + os400: asn1_new_from_bytes(): use data from a single element only + +- [Patrick Monnerat brought this change] + + os400: fix an ILE/RPG prototype + +- [Patrick Monnerat brought this change] + + os400: implement character encoding conversion support + +- [Patrick Monnerat brought this change] + + os400: do not miss some external prototypes + + Build procedure extproto() did not strip braces from header files, thus + possibly prepended them to true prototypes. This prevented the prototype to + be recognized as such. + The solution implemented here is to map braces to semicolons, effectively + considering them as potential prototype delimiters. + +- [Patrick Monnerat brought this change] + + os400: Really add specific README + +- [Patrick Monnerat brought this change] + + os400: Add specific README and include new files in dist tarball + +- [Patrick Monnerat brought this change] + + os400: add compilation scripts + +- [Patrick Monnerat brought this change] + + os400: include files for ILE/RPG + + In addition, file os400/macros.h declares all procedures originally + defined as macros. It must not be used for real inclusion and is only + intended to be used as a `database' for macro wrapping procedures generation. + +- [Patrick Monnerat brought this change] + + os400: add supplementary header files/wrappers. Define configuration. 
+ +- [Patrick Monnerat brought this change] + + Protect callback function calls from macro substitution + + Some structure fields holding callback addresses have the same name as the + underlying system function (connect, send, recv). Set parentheses around + their reference to suppress a possible macro substitution. + + Use a macro for connect() on OS/400 to resolve a const/nonconst parameter + problem. + +- [Patrick Monnerat brought this change] + + Add interface for OS/400 crypto library QC3 + +- [Patrick Monnerat brought this change] + + misc: include stdarg.h for debug code + +- [Patrick Monnerat brought this change] + + Document crypto library interface + +- [Patrick Monnerat brought this change] + + Feature an optional crypto-specific macro to rsa sign a data fragment vector + + OS/400 crypto library is unable to sign a precomputed SHA1 hash: however + it does support a procedure that hashes data fragments and rsa signs. + If defined, the new macro _libssh2_rsa_sha1_signv() implements this function + and disables use of _libssh2_rsa_sha1_sign(). + + The function described above requires that the struct iovec unused slacks are + cleared: for this reason, macro libssh2_prepare_iovec() has been introduced. + It should be defined as empty for crypto backends that are not sensitive + to struct iovec unused slack values. 
+ +- [Patrick Monnerat brought this change] + + Fold long lines in include files + +- [Viktor Szakats brought this change] + + kex.c: fix indentation + + Closes #71 + +- [Viktor Szakats brought this change] + + add OpenSSL-1.1.0-pre2 compatibility + + Closes #70 + +- [Viktor Szakats brought this change] + + add OpenSSL 1.1.0-pre1 compatibility + + * close https://github.com/libssh2/libssh2/issues/69 + * sync a declaration with the rest of similar ones + * handle EVP_MD_CTX_new() returning NULL with OpenSSL 1.1.0 + * fix potential memory leak with OpenSSL 1.1.0 in + _libssh2_*_init() functions, when EVP_MD_CTX_new() succeeds, + but EVP_DigestInit() fails. + +Marc Hoersken (22 Dec 2015) +- wincng.c: fixed _libssh2_wincng_hash_final return value + + _libssh2_wincng_hash_final was returning the internal BCRYPT + status code instead of a valid libssh2 return value (0 or -1). + + This also means that _libssh2_wincng_hash never returned 0. + +- wincng.c: fixed possible memory leak in _libssh2_wincng_hash + + If _libssh2_wincng_hash_update failed _libssh2_wincng_hash_final + would never have been called before. + + Reported by Zenju. + +Kamil Dudka (15 Dec 2015) +- [Paul Howarth brought this change] + + libssh2.pc.in: fix the output of pkg-config --libs + + ... such that it does not include LDFLAGS used to build libssh2 itself. + There was a similar fix in the curl project long time ago: + + https://github.com/bagder/curl/commit/curl-7_19_7-56-g4c8adc8 + + Bug: https://bugzilla.redhat.com/1279966 + Signed-off-by: Kamil Dudka + +Marc Hoersken (6 Dec 2015) +- hostkey.c: align code path of ssh_rsa_init to ssh_dss_init + +- hostkey.c: fix invalid memory access if libssh2_dsa_new fails + + Reported by dimmaq, fixes #66 + +Daniel Stenberg (3 Nov 2015) +- [Will Cosgrove brought this change] + + gcrypt: define libssh2_sha256_ctx + + Looks like it didn't make it into the latest commit for whatever reason. 
+ + Closes #58 + +- [Salvador Fandino brought this change] + + libssh2_session_set_last_error: Add function + + Net::SSH2, the Perl wrapping module for libssh2 implements several features* + on top of libssh2 that can fail and so need some mechanism to report the error + condition to the user. + + Until now, besides the error state maintained internally by libssh2, another + error state was maintained at the Perl level for every session object and then + additional logic was used to merge both error states. That is a maintenance + nighmare, and actually there is no way to do it correctly and consistently. + + In order to allow the high level language to add new features to the library + but still rely in its error reporting features the new function + libssh2_session_set_last_error (that just exposses _libssh2_error_flags) is + introduced. + + *) For instance, connecting to a remote SSH service giving the hostname and + port. + + Signed-off-by: Salvador Fandino + Signed-off-by: Salvador Fandiño + +- [Salvador Fandino brought this change] + + _libssh2_error: Support allocating the error message + + Before this patch "_libssh2_error" required the error message to be a + static string. + + This patch adds a new function "_libssh2_error_flags" accepting an + additional "flags" argument and specifically the flag + "LIBSSH2_ERR_FLAG_DUP" indicating that the passed string must be + duplicated into the heap. + + Then, the method "_libssh2_error" has been rewritten to use that new + function under the hood. + + Signed-off-by: Salvador Fandino + Signed-off-by: Salvador Fandiño + +- [Will Cosgrove brought this change] + + added engine.h include to fix warning + +- [sune brought this change] + + kex.c: removed dupe entry from libssh2_kex_methods[] + + Closes #51 + +- [Salvador Fandiño brought this change] + + userauth: Fix off by one error when reading public key file + + After reading the public key from file the size was incorrectly + decremented by one. 
+ + This was usually a harmless error as the last character on the public + key file is an unimportant EOL. But if due to some error the public key + file is empty, the public key size becomes (uint)(0 - 1), resulting in + an unrecoverable out of memory error later. + + Signed-off-by: Salvador Fandiño + +- [Salvador Fandino brought this change] + + channel: Detect bad usage of libssh2_channel_process_startup + + A common novice programmer error (at least among those using the + wrapping Perl module Net::SSH2), is to try to reuse channels. + + This patch detects that incorrect usage and fails with a + LIBSSH2_ERROR_BAD_USE error instead of hanging. + + Signed-off-by: Salvador Fandino + +- [Will Cosgrove brought this change] + + kex: Added diffie-hellman-group-exchange-sha256 support + + ... and fixed HMAC_Init deprecated usage + + Closes #48 + +Alexander Lamaison (21 Sep 2015) +- Prefixed new #defines to prevent collisions. + + Other libraries might have their own USE_WIN32_*FILES. + +- [keith-daigle brought this change] + + Update examples/scp.c to fix bug where large files on win32 would cause got to wrap and go negative + +- [David Byron brought this change] + + add libssh2_scp_recv2 to support large (> 2GB) files on windows + +Daniel Stenberg (17 Sep 2015) +- [sune brought this change] + + WinCNG: support for SHA256/512 HMAC + + Closes #47 + +- [brian m. carlson brought this change] + + Add support for HMAC-SHA-256 and HMAC-SHA-512. + + Implement support for these algorithms and wire them up to the libgcrypt + and OpenSSL backends. Increase the maximum MAC buffer size to 64 bytes + to prevent buffer overflows. Prefer HMAC-SHA-256 over HMAC-SHA-512, and + that over HMAC-SHA-1, as OpenSSH does.
+ + Closes #40 + +- [Zenju brought this change] + + kex: free server host key before allocating it (again) + + Fixes a memory leak when Synology server requests key exchange + + Closes #43 + +- [Viktor Szakats brought this change] + + GNUmakefile: up OpenSSL version + + closes #23 + +- [Viktor Szakats brought this change] + + GNUmakefile: add -m64 CFLAGS when targeting mingw64, add -m32/-m64 to LDFLAGS + + libssh2 equivalent of curl patch https://github.com/bagder/curl/commit/d21b66835f2af781a3c2a685abc92ef9f0cd86be + + This allows to build for the non-default target when using a multi-target mingw distro. + Also bump default OpenSSL dependency path to 1.0.2c. + +- [Viktor Szakats brought this change] + + GNUmakefile: add support for LIBSSH2_LDFLAG_EXTRAS + + It is similar to existing LIBSSH2_CFLAG_EXTRAS, but for + extra linker options. + + Also delete some line/file ending whitespace. + + closes #27 + +- [nasacj brought this change] + + hostkey.c: Fix compiling error when OPENSSL_NO_MD5 is defined + + Closes #32 + +- [Mizunashi Mana brought this change] + + openssl.h: adjust the rsa/dsa includes + + ... to work when built without DSA support. + + Closes #36 + +Alexander Lamaison (26 Jul 2015) +- Let CMake build work as a subproject. + + Patch contributed by JasonHaslam. + +- Fix builds with Visual Studio 2015. + + VS2015 moved stdio functions to the header files as inline function. That means check_function_exists can't detect them because it doesn't use header files - just does a link check. Instead we need to use check_symbol_exists with the correct headers. + +Kamil Dudka (2 Jul 2015) +- cmake: include CMake files in the release tarballs + + Despite we announced the CMake support in libssh2-1.6.0 release notes, + the files required by the CMake build system were not included in the + release tarballs. Hence, the only way to use CMake for build was the + upstream git repository. + + This commit makes CMake actually supported in the release tarballs. 
+ +- tests/mansyntax.sh: fix 'make distcheck' with recent autotools + + Do not create symbolic links off the build directory. Recent autotools + verify that out-of-source build works even if the source directory tree + is not writable. + +- openssl: fix memleak in _libssh2_dsa_sha1_verify() + +Daniel Stenberg (12 Jun 2015) +- openssl: make libssh2_sha1 return error code + + - use the internal prefix _libssh2_ for non-exported functions + + - removed libssh2_md5() since it wasn't used + + Reported-by: Kamil Dudka + +- [LarsNordin-LNdata brought this change] + + SFTP: Increase speed and datasize in SFTP read + + The function sftp_read never returns more than 2000 bytes (as it should + when I asked Daniel). I increased the MAX_SFTP_READ_SIZE to 30000 but + didn't get the same speed as a sftp read in SecureSSH. I analyzed the + code and found that a return always was done when a chunk has been read. + I changed it to a sliding buffer and worked on all available chunks. I + got an increase in speed and none of the tests I have done has failed + (both local net and over Internet). Please review and test. I think + 30000 is still not the optimal MAX_SFTP_READ_SIZE, my next goal is to + make an API to enable changing this value (The SecureSSH sftp_read has + more complete filled packages when comparing the network traffic) + +- bump: start working on 1.6.1 + +Version 1.6.0 (5 Jun 2015) + +Daniel Stenberg (5 Jun 2015) +- RELEASE-NOTES: synced with 858930cae5c6a + +Marc Hoersken (19 May 2015) +- wincng.c: fixed indentation + +- [sbredahl brought this change] + + wincng.c: fixed memleak in (block) cipher destructor + +Alexander Lamaison (6 May 2015) +- [Jakob Egger brought this change] + + libssh2_channel_open: more detailed error message + + The error message returned by libssh2_channel_open in case of a server side channel open failure is now more detailed and includes the four standard error conditions in RFC 4254.
+ +- [Hannes Domani brought this change] + + kex: fix libgcrypt memory leaks of bignum + + Fixes #168. + +Marc Hoersken (3 Apr 2015) +- configure.ac: check for SecureZeroMemory for clear memory feature + +- Revert "wincng.c: fix clear memory feature compilation with mingw" + + This reverts commit 2d2744efdd0497b72b3e1ff6e732aa4c0037fc43. + + Autobuilds show that this did not solve the issue. + And it seems like RtlFillMemory is defined to memset, + which would be optimized out by some compilers. + +- wincng.c: fix clear memory feature compilation with mingw + +Alexander Lamaison (1 Apr 2015) +- [LarsNordin-LNdata brought this change] + + Enable use of OpenSSL that doesn't have DSA. + + Added #if LIBSSH2_DSA for all DSA functions. + +- [LarsNordin-LNdata brought this change] + + Use correct no-blowfish #define with OpenSSL. + + The OpenSSL define is OPENSSL_NO_BF, not OPENSSL_NO_BLOWFISH. + +Marc Hoersken (25 Mar 2015) +- configure: error if explicitly enabled clear-memory is not supported + + This takes 22bd8d81d8fab956085e2079bf8c29872455ce59 and + b8289b625e291bbb785ed4add31f4759241067f3 into account, + but still makes it enabled by default if it is supported + and error out in case it is unsupported and was requested. + +Daniel Stenberg (25 Mar 2015) +- configure: make clear-memory default but only WARN if backend unsupported + + ... instead of previous ERROR. + +Marc Hoersken (24 Mar 2015) +- wincng.h: fix warning about computed return value not being used + +- nonblocking examples: fix warning about unused tvdiff on Mac OS X + +Daniel Stenberg (24 Mar 2015) +- openssl: fix compiler warnings + +- configure: fix --disable-clear-memory check + +Marc Hoersken (23 Mar 2015) +- scp.c: improved command length calculation + + Reduced number of calls to strlen, because shell_quotearg already + returns the length of the resulting string (e.g. quoted path) + which we can add to the existing and known cmd_len.
+ Removed obsolete call to memset again, because we can put a final + NULL-byte at the end of the string using the calculated length. + +- scp.c: improved and streamlined formatting + +- scp.c: fix that scp_recv may transmit not initialised memory + +- scp.c: fix that scp_send may transmit not initialised memory + + Fixes ticket 244. Thanks Torsten. + +- kex: do not ignore failure of libssh2_sha1_init() + + Based upon 43b730ce56f010e9d33573fcb020df49798c1ed8. + Fixes ticket 290. Thanks for the suggestion, mstrsn. + +- wincng.h: fix return code of libssh2_md5_init() + +- openssl.c: fix possible segfault in case EVP_DigestInit fails + +- wincng.c: fix possible use of uninitialized variables + +- wincng.c: fix unused argument warning if clear memory is not enabled + +- wincng: Added explicit clear memory feature to WinCNG backend + + This re-introduces the original feature proposed during + the development of the WinCNG crypto backend. It still needs + to be added to libssh2 itself and probably other backends. + + Memory is cleared using the function SecureZeroMemory which is + available on Windows systems, just like the WinCNG backend. + +- wincng.c: fixed mixed line-endings + +- wincng.c: fixed use of invalid parameter types in a8d14c5dcf + +- wincng.c: only try to load keys corresponding to the algorithm + +- wincng.c: moved PEM headers into definitions + +- wincng.h: fixed invalid parameter name + +- wincng: fixed mismatch with declarations in crypto.h + +- userauth.c: fixed warning C6001: using uninitialized sig and sig_len + +- pem.c: fixed warning C6269: possible incorrect order of operations + +- wincng: add support for authentication keys to be passed in memory + + Based upon 18cfec8336e and daa2dfa2db. + +- pem.c: add _libssh2_pem_parse_memory to parse PEM from memory + + Requirement to implement 18cfec8336e for Libgcrypt and WinCNG. 
+ +- pem.c: fix copy and paste mistake from 55d030089b8 + +- userauth.c: fix another possible dereference of a null pointer + +- userauth.c: fix possible dereference of a null pointer + +- pem.c: reduce number of calls to strlen in readline + +Alexander Lamaison (17 Mar 2015) +- [Will Cosgrove brought this change] + + Initialise HMAC_CTX in more places. + + Missed a couple more places we init ctx to avoid openssl threading crash. + +- Build build breakage in WinCNG backend caused when adding libssh2_userauth_publickey_frommemory. + + The new feature isn't implemented for the WinCNG backend currently, but the WinCNG backend didn't contain any implementation of the required backend functions - even ones that returns an error. That caused link errors. + + This change fixes the problem by providing an implementation of the backend functions that returns an error. + +- Fix breakage in WinCNG backend caused by introducing libssh2_hmac_ctx_init. + + The macro was defined to nothing for the libgcrypt backend, but not for WinCNG. This brings the latter into line with the former. + +Daniel Stenberg (15 Mar 2015) +- userauth_publickey_frommemory.3: add AVAILABILITY + + ... it will be added in 1.6.0 + +- libssh2: next version will be called 1.6.0 + + ... since we just added a new function. + +- docs: add libssh2_userauth_publickey_frommemory.3 to dist + + The function and man page were added in commit 18cfec8336e + +- [Jakob Egger brought this change] + + direct_tcpip: Fixed channel write + + There were 3 bugs in this loop: + 1) Started from beginning after partial writes + 2) Aborted when 0 bytes were sent + 3) Ignored LIBSSH2_ERROR_EAGAIN + + See also: + https://trac.libssh2.org/ticket/281 + https://trac.libssh2.org/ticket/293 + +Alexander Lamaison (15 Mar 2015) +- [Will Cosgrove brought this change] + + Must init HMAC_CTX before using it. + + Must init ctx before using it or openssl will reuse the hmac which is not thread safe and causes a crash. 
+ Added libssh2_hmac_ctx_init macro. + +- Add continuous integration configurations. + + Linux-based CI is done by Travis CI. Windows-based CI is done by Appveyor. + +- [David Calavera brought this change] + + Allow authentication keys to be passed in memory. + + All credits go to Joe Turpin, I'm just reaplying and cleaning his patch: + http://www.libssh2.org/mail/libssh2-devel-archive-2012-01/0015.shtml + + * Use an unimplemented error for extracting keys from memory with libgcrypt. + +Daniel Stenberg (14 Mar 2015) +- docs: include the renamed INSTALL* files in dist + +Alexander Lamaison (13 Mar 2015) +- Prevent collisions between CMake and Autotools in examples/ and tests/. + +- Avoid clash between CMake build and Autotools. + + Autotools expects a configuration template file at src/libssh2_config.h.in, which buildconf generates. But the CMake build system has its CMake-specific version of the file at this path. This means that, if you don't run buildconf, the Autotools build will fail because it configured the wrong header template. + + See https://github.com/libssh2/libssh2/pull/8. + +- Merge pull request #8 from alamaison/cmake + + CMake build system. + +- CMake build system. + + Tested: + - Windows: + - Visual C++ 2005/2008/2010/2012/2013/MinGW-w64 + - static/shared + - 32/64-bit + - OpenSSL/WinCNG + - Without zlib + - Linux: + - GCC 4.6.3/Clang 3.4 + - static/shared + - 32/64-bit + - OpenSSL/Libgcrypt + - With/Without zlib + - MacOS X + - AppleClang 6.0.0 + - static + - 64-bit + - OpenSSL + - Without zlib + + Conflicts: + README + +- Man man syntax tests fail gracefully if man version is not suitable. + +- Return valid code from test fixture on failure. + + The sshd test fixture was returning -1 if an error occurred, but negative error codes aren't technically valid (google it). Bash on Windows converted them to 0 which made setup failure look as though all tests were passing. + +- Let mansyntax.sh work regardless of where it is called from. 
+ +Daniel Stenberg (12 Mar 2015) +- [Viktor Szakáts brought this change] + + mingw build: allow to pass custom CFLAGS + + Allow to pass custom `CFLAGS` options via environment variable + `LIBSSH2_CFLAG_EXTRAS`. Default and automatically added options of + `GNUmakefile` have preference over custom ones. This addition is useful + for passing f.e. custom CPU tuning or LTO optimization (`-flto + -ffat-lto-objects`) options. The only current way to do this is to edit + `GNUmakefile`. This patch makes it unnecessary. + + This is a mirror of similar libcurl patch: + https://github.com/bagder/curl/pull/136 + +- [Will Cosgrove brought this change] + + userauth: Fixed prompt text no longer being copied to the prompts struct + + Regression from 031566f9c + +- README: update the git repo locations + +- wait_socket: wrong use of difftime() + + With reversed arguments it would always return a negative value... + + Bug: https://github.com/bagder/libssh2/issues/1 + +- bump: start working toward 1.5.1 now + +Version 1.5.0 (11 Mar 2015) + +Daniel Stenberg (11 Mar 2015) +- RELEASE-NOTES: 1.5.0 release + +- [Mariusz Ziulek brought this change] + + kex: bail out on rubbish in the incoming packet + + CVE-2015-1782 + + Bug: http://www.libssh2.org/adv_20150311.html + +- docs: move INSTALL, AUTHORS, HACKING and TODO to docs/ + + And with this, cleanup README to be shorter and mention the new source + code home. + +- .gitignore: don't ignore INSTALL + +Dan Fandrich (4 Mar 2015) +- examples/x11.c: include sys/select.h for improved portability + +Daniel Stenberg (4 Mar 2015) +- RELEASE-NOTES: synced with a8473c819bc068 + + In preparation for the upcoming 1.5.0 release. + +Guenter Knauf (8 Jan 2015) +- NetWare build: added some missing exports. + +Marc Hoersken (29 Dec 2014) +- knownhost.c: fix use of uninitialized argument variable wrote + + Detected by clang scan in line 1195, column 18. 
+ +- examples/x11.c: fix result of operation is garbage or undefined + + Fix use of uninitialized structure w_size_bck. + Detected by clang scan in line 386, column 28. + +- examples/x11.c: remove dead assigments of some return values + + Detected by clang scan in line 212, column 9. + Detected by clang scan in line 222, column 13. + Detected by clang scan in line 410, column 13. + +- examples/x11.c: fix possible memory leak if read fails + + Detected by clang scan in line 224, column 21. + +- examples/x11.c: fix invalid removal of first list element + + Fix use of memory after it was being freed. + Detected by clang scan in line 56, column 12. + +- userauth.c: make sure that sp_len is positive and avoid overflows + + ... if the pointer subtraction of sp1 - pubkey - 1 resulted in a + negative or larger value than pubkey_len, memchr would fail. + + Reported by Coverity CID 89846. + +- channel.c: remove logically dead code, host cannot be NULL here + + ... host cannot be NULL in line 525, because it is always + valid (e.g. at least set to "0.0.0.0") after lines 430 and 431. + + Reported by Coverity CID 89807. + +- session.c: check return value of session_nonblock during startup + + Reported by Coverity CID 89803. + +- session.c: check return value of session_nonblock in debug mode + + Reported by Coverity CID 89805. + +- pem.c: fix mixed line-endings introduced with 8670f5da24 + +- pem.c: make sure there's a trailing zero and b64data is not NULL + + ... if there is no base64 data between PEM header and footer. + Reported by Coverity CID 89823. + +- kex.c: make sure mlist is not set to NULL + + ... if the currently unsupported LANG methods are called. + Reported by Coverity CID 89834. + +- packet.c: i < 256 was always true and i would overflow to 0 + + Visualize that the 0-termination is intentional, because the array + is later passed to strlen within _libssh2_packet_askv. 
+ +- silence multiple data conversion warnings + +Daniel Stenberg (23 Dec 2014) +- agent_connect_unix: make sure there's a trailing zero + + ... if the path name was too long. Reported by Coverity CID 89801. + +Marc Hoersken (22 Dec 2014) +- examples on Windows: use native SOCKET-type instead of int + + And check return values accordingly. + +- userauth.c: improve readability and clarity of for-loops + +Daniel Stenberg (22 Dec 2014) +- calloc: introduce LIBSSH2_CALLOC() + + A simple function using LIBSSH2_ALLOC + memset, since this pattern was + used in multiple places and this simplies code in general. + +Marc Hoersken (15 Dec 2014) +- libssh2_priv.h: Ignore session, context and format parameters + +- x11 example: check return value of socket function + +- examples: fixed mixed line-endings introduced with aedfba25b8 + +- wincng.c: explicitly ignore BCrypt*AlgorithmProvider return codes + + Fixes VS2012 code analysis warning C6031: + return value ignored: could return unexpected value + +- wincng.c: fix possible invalid memory write access + + Fixes VS2012 code analysis warning C6386: + buffer overrun: accessing 'pbOutput', the writable size is + 'cbOutput' bytes, but '3' bytes may be written: libssh2 wincng.c 610 + +- tests on Windows: check for WSAStartup return code + + Fixes VS2012 code analysis warning C6031: + return value ignored: could return unexpected value + +- wincng.c: fix possible NULL pointer de-reference of bignum + + Fixes VS2012 code analysis warning C6011: + dereferencing NULL pointer 'bignum'. libssh2 wincng.c 1567 + +- wincng.c: fix possible use of uninitialized memory + + Fixes VS2012 code analysis warning C6001: + using uninitialized memory 'cbDecoded'. libssh2 wincng.c 553 + +- packet.c: fix possible NULL pointer de-reference within listen_state + + Fixes VS2012 code analysis warning C6011: + dereferencing NULL pointer 'listen_state->channel'. 
libssh2 packet.c 221 + +- kex.c: fix possible NULL pointer de-reference with session->kex + + Fixes VS2012 code analysis warning C6011: + dereferencing NULL pointer 'session->kex'. libssh2 kex.c 1761 + +- agent.c: check return code of MapViewOfFile + + Fixes VS2012 code analysis warning C6387: 'p+4' may be '0': + this does not adhere to the specification for the function + 'memcpy': libssh2 agent.c 330 + + Fixes VS2012 code analysis warning C6387: 'p' may be '0': + this does not adhere to the specification for the function + 'UnmapViewOfFile': libssh2 agent.c 333 + +- examples on Windows: check for socket return code + + Fixes VS2012 code analysis warning C28193: + The variable holds a value that must be examined + +- examples on Windows: check for WSAStartup return code + + Fixes VS2012 code analysis warning C6031: + return value ignored: could return unexpected value + +Guenter Knauf (11 Dec 2014) +- wincng.c: silence some more gcc compiler warnings. + +- wincng.c: silence gcc compiler warnings. + +- Watcom build: added support for WinCNG build. + +- build: updated dependencies in makefiles. + +Daniel Stenberg (4 Dec 2014) +- configure: change LIBS not LDFLAGS when checking for libs + + Closes #289 + + Patch-by: maurerpe + +Guenter Knauf (3 Dec 2014) +- MinGW build: some more GNUMakefile tweaks. + + test/GNUmakefile: added architecture autodetection; added switches to + CFLAGS and RCFLAGS to make sure that the right architecture is used. + Added support to build with WinCNG. + +- sftpdir.c: added authentication method detection. + + Stuff copied over from ssh2.c to make testing a bit easier. + +- NMake build: fixed LIBS settings. + +- NMake build: added support for WinCNG build. + +- MinGW build: some GNUMakefile tweaks. + + Added architecture autodetection; added switches to CFLAGS and + RCFLAGS to make sure that the right architecture is used. + Added support to build with WinCNG. + +- MinGW build: Fixed redefine warnings. + +- Updated copyright year.
+ +Daniel Stenberg (31 Aug 2014) +- COPYING: bump the copyright year + +Dan Fandrich (28 Jul 2014) +- docs: fixed a bunch of typos + +- docs: added missing libssh2_session_handshake.3 file + +Marc Hoersken (19 May 2014) +- wincng.c: specify the required libraries for dependencies using MSVC + + Initially reported by Bob Kast as "for MS VS builds, specify the + libraries that are required so they don't need to go into all + project files that may use this library". Thanks a lot. + +- [Bob Kast brought this change] + + windows build: do not export externals from static library + + If you are building a DLL, then you need to explicitly export each + entry point. When building a static library, you should not. + + libssh2 was exporting the entry points whether it was building a DLL or a + static library. To elaborate further, if libssh2 was used as a static + library, which was being linked into a DLL, the libssh2 API would be + exported from that separate DLL. + +Daniel Stenberg (19 May 2014) +- [Mikhail Gusarov brought this change] + + Fix typos in manpages + +Marc Hoersken (18 May 2014) +- wincng.c: Fixed memory leak in case of an error during ASN.1 decoding + +- configure: Display individual crypto backends on separate lines + + This avoids line-wrapping in between parameters and makes the + error message look like the following: + + configure: error: No crypto library found! + Try --with-libssl-prefix=PATH + or --with-libgcrypt-prefix=PATH + or --with-wincng on Windows + +- [Bob Kast brought this change] + + libssh2_priv.h: a 1 bit bit-field should be unsigned + + some compilers may not like this + +- knownhost.c: Fixed warning that pointer targets differ in signedness + +- wincng.c: Fixed warning about pointer targets differing in signedness + +- tcpip-forward.c: Fixed warning that pointer targets differ in signedness + + libssh2_channel_forward_listen_ex uses ints instead of unsigned ints. 
+ +- misc.c: Fixed warning about mixed declarations and code + +- libgcrypt.h: Fixed warning about pointer targets differing in signedness + +- wincng.h: Fixed warning about pointer targets differing in signedness + +- misc.c: Fixed warning about unused parameter abstract + +- tcpip-forward.c: Removed unused variables shost, sport and sockopt + +- wincng.h: Added forward declarations for all WinCNG functions + + Initially reported by Bob Kast as "Wincng - define function + prototypes for wincng routines". Thanks a lot. + + Also replaced structure definitions with type definitions. + +- [Bob Kast brought this change] + + libssh2.h: on Windows, a socket is of type SOCKET, not int + +- win32: Added WinCNG targets to generated Visual Studio project + + Inspired by Bob Kast's reports, this commit enables the compilation + of libssh2 with WinCNG using the generated Visual Studio project files. + This commit adds WinCNG support to parts of the existing Win32 build + infrastructure, until new build systems, like pre-defined VS project + files or CMake files may be added. + + This commit and b20bfeb3e519119a48509a1099c06d65aa7da1d7 raise one + question: How to handle build systems, like VS project files, that + need to include all source files regardless of the desired target, + including all supported crypto backends? For now the mentioned commit + added a check for LIBSSH2_OPENSSL to openssl.c and with this commit + the supported crypto backends are hardcoded within Makefile.am. + +- libssh2_priv msvc: Removed redundant definition of inline keyword + + Initially reported by Bob Kast as "Remove redundant 'inline' define". + Thanks a lot. + +- wincng: Made data parameter to hash update function constant + + Initially reported by Bob Kast as "formal parameter must be const + since it is used in contexts where the actual parameter may be const". + Thanks a lot. 
+ +- wincng: fix cross-compilation against the w64 mingw-runtime package + +- openssl: Check for LIBSSH2_OPENSSL in order to compile with openssl + +- wincng: Fixed use of possible uninitialized variable pPaddingInfo + + Reported by Bob Kast, thanks a lot. + +- wincng: Added cast for double to unsigned long conversion + +- wincng: Cleaned up includes and check NTSTATUS using macro + + Removed header file combination that is not supported on a real + Windows platform and can only be compiled using MinGW. Replaced + custom NTSTATUS return code checks with BCRYPT_SUCCESS macro. + +Daniel Stenberg (16 Mar 2014) +- userauth_hostbased_fromfile: zero assign to avoid uninitialized use + + Detected by clang-analyze + +- channel_receive_window_adjust: store windows size always + + Avoid it sometimes returning without storing it, leaving calling + functions with unknown content! + + Detected by clang-analyzer + +- publickey_packet_receive: avoid junk in returned pointers + + clang-analyzer found this risk it would return a non-initialized pointer + in a success case + +Peter Stuge (16 Mar 2014) +- [Marc Hoersken brought this change] + + Added Windows Cryptography API: Next Generation based backend + +- [Marc Hoersken brought this change] + + knownhost.c: fixed that 'key_type_len' may be used uninitialized + + ../src/knownhost.c: In function 'libssh2_knownhost_readline': + ../src/knownhost.c:651:16: warning: 'key_type_len' may be used + uninitialized in this function [-Wmaybe-uninitialized] + rc = knownhost_add(hosts, hostbuf, NULL, + ^ + ../src/knownhost.c:745:12: note: 'key_type_len' was declared here + size_t key_type_len; + ^ + +- [Marc Hoersken brought this change] + + pem.c: always compile pem.c independently of crypto backend + +- Fix non-autotools builds: Always define the LIBSSH2_OPENSSL CPP macro + + Commit d512b25f69a1b6778881f6b4b5ff9cfc6023be42 introduced a crypto + library abstraction in the autotools build system, to allow us to more + easily support new crypto 
libraries. In that process it was found that + all other build systems which we support are hard-coded to build with + OpenSSL. Commit f5c1a0d98bd51aeb24aca3d49c7c81dcf8bd858d fixes automake + introduced into non-autotools build systems but still overlooked the + CPP macro saying that we are using OpenSSL. + + Thanks to Marc Hörsken for identifying this issue and proposing a fix + for win32/{GNUmakefile,config.mk}. This commit uses a slightly different + approach but the end result is the same. + +Dan Fandrich (15 Mar 2014) +- channel_close: Close the channel even in the case of errors + +- sftp_close_handle: ensure the handle is always closed + + Errors are reported on return, but otherwise the close path is + completed as much as possible and the handle is freed on exit. + +Alexander Lamaison (6 Mar 2014) +- knownhost: Restore behaviour of `libssh2_knownhost_writeline` with short buffer. + + Commit 85c6627c changed the behaviour of `libssh2_knownhost_writeline` so that it stopped returning the number of bytes needed when the given buffer was too small. Also, the function changed such that it might write to part of the buffer before realising it is too small. + + This commit restores the original behaviour, whilst keeping the unknown-key-type functionality that 85c6627c introduced. Instead of writing to the buffer piecemeal, the length of the various parts is calculated up front and the buffer written only if there is enough space. The calculated necessary size is output in `outlen` regardless of whether the buffer was written to. + + The main use-case for the original behaviour that this commit restores is to allow passing in a NULL buffer to get the actual buffer size needed, before calling the function again with the buffer allocated to the exact size required. + +- knownhost: Fix DSS keys being detected as unknown. + + A missing `else` meant ssh-dss format keys were being re-detected as unknown format.
+ +Dan Fandrich (6 Mar 2014) +- knownhosts: Abort if the hosts buffer is too small + + This could otherwise cause a match on the wrong host + +- agent_list_identities: Fixed memory leak on OOM + +- Fixed a few typos + +- userauth: Fixed an attempt to free from stack on error + +- Fixed a few memory leaks in error paths + +- Fixed two potential use-after-frees of the payload buffer + + The first might occur if _libssh2_packet_add returns an error, as + fullpacket_state wasn't reset to idle so if it were possible for + fullpacket to be called again, it would return to the same state + handler and re-use the freed p->packet buffer. + + The second could occur if decrypt returned an error, as it freed the + packet buffer but did not clear total_num, meaning that freed buffer + could be written into again later. + +Alexander Lamaison (28 Nov 2013) +- Fix missing `_libssh2_error` in `_libssh2_channel_write`. + + In one case, the error code from `_libssh2_transport_read` was being returned from `_libssh2_channel_write` without setting it as the last error by calling `_libssh2_error`. This commit fixes that. + + Found when using a session whose socket had been inadvertently destroyed. The calling code got confused because via `libssh2_session_last_error` it appeared no error had occurred, despite one being returned from the previous function. + +Kamil Dudka (21 Nov 2013) +- [Mark McPherson brought this change] + + openssl: initialise the digest context before calling EVP_DigestInit() + + When using the OpenSSL libraries in FIPS mode, the function call + EVP_DigestInit() is actually #defined to FIPS_digestinit(). + Unfortunately wheres EVP_DigestInit() initialises the context and then + calls EVP_DigestInit_ex(), this function assumes that the context has + been pre-initialised and crashes when it isn't. 
+ + Bug: https://trac.libssh2.org/ticket/279 + + Fixes #279 + +- [Marc Hörsken brought this change] + + .gitignore: Ignore files like src/libssh2_config.h.in~ + +Peter Stuge (13 Nov 2013) +- Move automake conditionals added by commit d512b25f out of Makefile.inc + + Commit d512b25f69a1b6778881f6b4b5ff9cfc6023be42 added automake + conditionals to Makefile.inc but since Makefile.inc is included + from Makefile for all other build systems that does not work. + + This commit instead adds Makefile.OpenSSL.inc and Makefile.libgcrypt.inc + and moves the automake conditional to its proper place, src/Makefile.am. + + The automake conditional includes the correct Makefile.$name.inc per + the crypto library selection/detection done by configure. + + All non-autotools build system files in libssh2 are hardcoded to use + OpenSSL and do not get a conditional but at least there is some reuse + because they can all include the new Makefile.OpenSSL.inc. + +Daniel Stenberg (27 Oct 2013) +- [Salvador Fandino brought this change] + + Set default window size to 2MB + + The default channel window size used until now was 256KB. This value is + too small and results on a bottleneck on real-life networks where + round-trip delays can easily reach 300ms. + + The issue was not visible because the configured channel window size + was being ignored and a hard-coded value of ~22MB being used instead, + but that was fixed on a previous commit. + + This patch just changes the default window size + (LIBSSH2_CHANNEL_WINDOW_DEFAULT) to 2MB. It is the same value used by + OpenSSH and in our opinion represents a good compromise between memory + used and transfer speed. + + Performance tests were run to determine the optimum value. 
The details + and related discussion are available from the following thread on the + libssh2 mailing-list: + + http://www.libssh2.org/mail/libssh2-devel-archive-2013-10/0018.shtml + http://article.gmane.org/gmane.network.ssh.libssh2.devel/6543 + + An excerpt follows: + + "I have been running some transfer test and measuring their speed. + + My setup was composed of a quad-core Linux machine running Ubuntu 13.10 + x86_64 with a LXC container inside. The data transfers were performed + from the container to the host (never crossing through a physical + network device). + + Network delays were simulated using the tc tool. And ping was used to + verify that they worked as intended during the tests. + + The operation performed was the equivalent to the following ssh command: + + $ ssh container "dd bs=16K count=8K if=/dev/zero" >/dev/null + + Though, establishment and closing of the SSH connection was excluded + from the timings. + + I run the tests several times transferring files of sizes up to 128MB + and the results were consistent between runs. + + The results corresponding to the 128MB transfer are available here: + + https://docs.google.com/spreadsheet/ccc?key=0Ao1yRmX6PQQzdG5wSFlrZl9HRWNET3ZyN0hnaGo5ZFE&usp=sharing + + It clearly shows that 256KB is too small as the default window size. + Moving to a 512MB generates a great improvement and after the 1MB mark + the returns rapidly diminish. Other factors (TCP window size, probably) + become more limiting than the channel window size + + For comparison I also performed the same transfers using OpenSSH. Its + speed is usually on par with that of libssh2 using a window size of 1MB + (even if it uses a 2MB window, maybe it is less aggressive sending the + window adjust msgs)." + + Signed-off-by: Salvador Fandino + +- [Salvador brought this change] + + _libssh2_channel_read: Honour window_size_initial + + _libssh2_channel_read was using an arbitrary hard-coded limit to trigger + the window adjusting code. 
The adjustment used was also hard-coded and + arbitrary, 15MB actually, which would limit the usability of libssh2 on + systems with little RAM. + + This patch, uses the window_size parameter passed to + libssh2_channel_open_ex (stored as remote.window_size_initial) plus the + buflen as the base for the trigger and the adjustment calculation. + + The memory usage when using the default window size is reduced from 22MB + to 256KB per channel (actually, if compression is used, these numbers + should be incremented by ~50% to account for the errors between the + decompressed packet sizes and the predicted sizes). + + My tests indicate that this change does not impact the performance of + transfers across localhost or a LAN, being it on par with that of + OpenSSH. On the other hand, it will probably slow down transfers on + networks with high bandwidth*delay when the default window size + (LIBSSH2_CHANNEL_WINDOW_DEFAULT=256KB) is used. + + Signed-off-by: Salvador Fandino + +- [Salvador Fandino brought this change] + + knownhosts: handle unknown key types + + Store but don't use keys of unsupported types on the known_hosts file. + + Currently, when libssh2 parses a known_host file containing keys of some + type it doesn't natively support, it stops reading the file and returns + an error. + + That means, that the known_host file can not be safely shared with other + software supporting other key types (i.e. OpenSSH). + + This patch adds support for handling keys of unknown type. It can read + and write them, even if they are never going to be matched. 
+ + At the source level the patch does the following things: + + - add a new unknown key type LIBSSH2_KNOWNHOST_KEY_UNKNOWN + + - add a new slot (key_type_name) on the known_host struct that is + used to store the key type in ascii form when it is not supported + + - parse correctly known_hosts entries with unknown key types and + populate the key_type_name slot + + - print correctly known_hosts entries of unknown type + + - when checking a host key ignore keys that do not match the key + + Fixes #276 + +- windows build: fix build errors + + Fixes various link errors with VS2010 + + Reported-by: "kdekker" + Fixes #272 + +- man page: add missing function argument + + for libssh2_userauth_publickey_fromfile_ex() + + Reported-by: "pastey" + + Fixes #262 + +- [Salvador brought this change] + + Fix zlib deflate usage + + Deflate may return Z_OK even when not all data has been compressed + if the output buffer becomes full. + + In practice this is very unlikely to happen because the output buffer + size is always some KBs larger than the size of the data passed for + compression from the upper layers and I think that zlib never expands + the data so much, even on the worst cases. + + Anyway, this patch plays on the safe side checking that the output + buffer is not exhausted. + + Signed-off-by: Salvador + +- [Salvador brought this change] + + comp_method_zlib_decomp: Improve buffer growing algorithm + + The old algorithm was O(N^2), causing lots and lots of reallocations + when highly compressed data was transferred. + + This patch implements a simpler one that just doubles the buffer size + everytime it is exhausted. It results in O(N) complexity. + + Also a smaller inflate ratio is used to calculate the initial size (x4). + + Signed-off-by: Salvador + +- [Salvador brought this change] + + Fix zlib usage + + Data may remain in zlib internal buffers when inflate() returns Z_OK + and avail_out == 0. In that case, inflate has to be called again. 
+
+ Also, once all the data has been inflated, it returns Z_BUF_ERROR to
+ signal that the input buffer has been exhausted.
+
+ Until now, the way to detect that a packet payload had been completely
+ decompressed was to check that no data remained on the input buffer
+ but that didn't account for the case where data remained on the internal
+ zlib buffers.
+
+ That resulted in packets not being completely decompressed and the
+ missing data reappearing on the next packet, though the bug was masked
+ by the buffer allocation algorithm most of the time and only manifested
+ when transferring highly compressible data.
+
+ This patch fixes the zlib usage.
+
+ Signed-off-by: Salvador
+
+- [Salvador brought this change]
+
+ _libssh2_channel_read: fix data drop when out of window
+
+ After filling the read buffer with data from the read queue, when the
+ window size was too small, "libssh2_channel_receive_window_adjust" was
+ called to increase it. In non-blocking mode that function could return
+ EAGAIN and, in that case, the EAGAIN was propagated upwards and the data
+ already read on the buffer lost.
+
+ The function was also moving between the two read states
+ "libssh2_NB_state_idle" and "libssh2_NB_state_created" both of which
+ behave in the same way (excepting a debug statement).
+
+ This commit modifies "_libssh2_channel_read" so that the
+ "libssh2_channel_receive_window_adjust" call is performed first (when
+ required) and if everything goes well, then it reads the data from the
+ queued packets into the read buffer.
+
+ It also removes the useless "libssh2_NB_state_created" read state.
+
+ Some rotted comments have also been updated.
+
+ Signed-off-by: Salvador
+
+- [Salvador Fandino brought this change]
+
+ window_size: redid window handling for flow control reasons
+
+ Until now, the window size (channel->remote.window_size) was being
+ updated just after receiving the packet from the transport layer.
+ + That behaviour is wrong because the channel queue may grow uncontrolled + when data arrives from the network faster that the upper layer consumes + it. + + This patch adds a new counter, read_avail, which keeps a count of the + bytes available from the packet queue for reading. Also, now the window + size is adjusted when the data is actually read by an upper layer. + + That way, if the upper layer stops reading data, the window will + eventually fill and the remote host will stop sending data. When the + upper layers reads enough data, a window adjust packet is delivered and + the transfer resumes. + + The read_avail counter is used to detect the situation when the remote + server tries to send data surpassing the window size. In that case, the + extra data is discarded. + + Signed-off-by: Salvador + +Peter Stuge (15 Sep 2013) +- configure.ac: Call zlib zlib and not libz in text but keep option names + +- configure.ac: Reorder --with-* options in --help output + +- configure.ac: Rework crypto library detection + + This further simplifies adding new crypto libraries. + +- Clean up crypto library abstraction in build system and source code + + libssh2 used to explicitly check for libgcrypt and default to OpenSSL. + + Now all possible crypto libraries are checked for explicitly, making + the addition of further crypto libraries both simpler and cleaner. + +- configure.ac: Add zlib to Requires.private in libssh2.pc if using zlib + +- Revert "Added Windows Cryptography API: Next Generation based backend" + + This reverts commit d385230e15715e67796f16f3e65fd899f21a638b. + +Daniel Stenberg (7 Sep 2013) +- [Leif Salomonsson brought this change] + + sftp_statvfs: fix for servers not supporting statfvs extension + + Fixes issue arising when server does not support statfvs and or fstatvfs + extensions. sftp_statvfs() and sftp_fstatvfs() after this patch will + handle the case when SSH_FXP_STATUS is returned from server. 
+ +- [Marc Hoersken brought this change] + + Added Windows Cryptography API: Next Generation based backend + +- [Kamil Dudka brought this change] + + partially revert "window_size: explicit adjustments only" + + This partially reverts commit 03ca9020756a4e16f0294e5b35e9826ee6af2364 + in order to fix extreme slowdown when uploading to localhost via SFTP. + + I was able to repeat the issue on RHEL-7 on localhost only. It did not + occur when uploading via network and it did not occur on a RHEL-6 box + with the same version of libssh2. + + The problem was that sftp_read() used a read-ahead logic to figure out + the window_size, but sftp_packet_read() called indirectly from + sftp_write() did not use any read-ahead logic. + +- _libssh2_channel_write: client spins on write when window full + + When there's no window to "write to", there's no point in waiting for + the socket to become writable since it most likely just will continue to + be. + + Patch-by: ncm + Fixes #258 + +- _libssh2_channel_forward_cancel: avoid memory leaks on error + + Fixes #257 + +- _libssh2_packet_add: avoid using uninitialized memory + + In _libssh2_packet_add, called by _libssh2_packet_read, a call to + _libssh2_packet_send that is supposed to send a one-byte message + SSH_MSG_REQUEST_FAILURE would send an uninitialized byte upon re-entry + if its call to _send returns _EAGAIN. + + Fixes #259 + +- _libssh2_channel_forward_cancel: accessed struct after free + + ... and the assignment was pointless anyway since the struct was about + to be freed. Bug introduced in dde2b094. 
+ + Fixes #268 + +Peter Stuge (2 Jun 2013) +- [Marc Hoersken brought this change] + + Fixed compilation using mingw-w64 + +- [Marc Hoersken brought this change] + + knownhost.c: use LIBSSH2_FREE macro instead of free + + Use LIBSSH2_FREE instead of free since + _libssh2_base64_encode uses LIBSSH2_ALLOC + +Daniel Stenberg (18 May 2013) +- [Matthias Kerestesch brought this change] + + libssh2_agent_init: init ->fd to LIBSSH2_INVALID_SOCKET + + ... previously it was left at 0 which is a valid file descriptor! + + Bug: https://trac.libssh2.org/ticket/265 + + Fixes #265 + +- userauth_password: pass on the underlying error code + + _libssh2_packet_requirev() may return different errors and we pass that + to the parent instead of rewriting it. + + Bug: http://libssh2.org/mail/libssh2-devel-archive-2013-04/0029.shtml + Reported by: Cosmin + +Peter Stuge (9 May 2013) +- [Marc Hoersken brought this change] + + libcrypt.c: Fix typo in _libssh2_rsa_sha1_sign() parameter type + +Kamil Dudka (4 May 2013) +- configure.ac: replace AM_CONFIG_HEADER with AC_CONFIG_HEADERS + + Reported by: Quintus + Bug: https://trac.libssh2.org/ticket/261 + +Guenter Knauf (12 Apr 2013) +- Fixed copyright string for NetWare build. + +Daniel Stenberg (9 Apr 2013) +- [Richard W.M. Jones brought this change] + + sftp: Add support for fsync (OpenSSH extension). + + The new libssh2_sftp_fsync API causes data and metadata in the + currently open file to be committed to disk at the server. + + This is an OpenSSH extension to the SFTP protocol. See: + + https://bugzilla.mindrot.org/show_bug.cgi?id=1798 + +- [Richard W.M. Jones brought this change] + + sftp: statvfs: Along error path, reset the correct 'state' variable. + +- [Richard W.M. Jones brought this change] + + sftp: seek: Don't flush buffers on same offset + + Signed-off-by: Richard W.M. Jones + +Guenter Knauf (9 Feb 2013) +- Updated dependency libs. + +- Fixed tool macro names. 
+ +Daniel Stenberg (29 Nov 2012) +- [Seth Willits brought this change] + + compiler warnings: typecast strlen in macros + + ... in macro parameters to avoid compiler warnings about lost precision. + + Several macros in libssh2.h call strlen and pass the result directly to + unsigned int parameters of other functions, which warns about precision + loss because strlen returns size_t which is unsigned long on at least + some platforms (such as OS X). The fix is to simply typecast the + strlen() result to unsigned int. + +- libssh2.h: bump version to 1.4.4-DEV + +Version 1.4.3 (27 Nov 2012) + +Daniel Stenberg (27 Nov 2012) +- RELEASE-NOTES: fixed for 1.4.3 + +- sftp_read: return error if a too large package arrives + +Peter Stuge (13 Nov 2012) +- Only define _libssh2_dsa_*() functions when building with DSA support + +Guenter Knauf (8 Nov 2012) +- Added .def file to output. + +Kamil Dudka (1 Nov 2012) +- libssh2_hostkey_hash.3: update the description of return value + + The function returns NULL also if the hash algorithm is not available. + +Guenter Knauf (24 Oct 2012) +- Fixed mode acciedently committed. + +- Ignore generated file. + +- Added hack to make use of Makefile.inc. + + This should avoid further maintainance of the objects list. + +- Fixed MSVC NMakefile. + + Added missing source files; added resource for DLL. + +Kamil Dudka (22 Oct 2012) +- examples: use stderr for messages, stdout for data + + Reported by: Karel Srot + Bug: https://bugzilla.redhat.com/867462 + +- openssl: do not leak memory when handling errors + + ,.. in aes_ctr_init(). Detected by Coverity. + +- channel: fix possible NULL dereference + + ... in libssh2_channel_get_exit_signal(). Detected by Coverity. + +- Revert "aes: the init function fails when OpenSSL has AES support" + + This partially reverts commit f4f2298ef3635acd031cc2ee0e71026cdcda5864. + + We need to use the EVP_aes_???_ctr() functions in FIPS mode. 
+
+- crypt: use hard-wired cipher block sizes consistently
+
+- openssl: do not ignore failure of EVP_CipherInit()
+
+- kex: do not ignore failure of libssh2_md5_init()
+
+ The MD5 algorithm is disabled when running in FIPS mode.
+
+Daniel Stenberg (21 Aug 2012)
+- [Peter Krempa brought this change]
+
+ known_hosts: Fail when parsing unknown keys in known_hosts file.
+
+ libssh2_knownhost_readfile() silently ignored problems when reading keys
+ in unsupported formats from the known hosts file. When the file is
+ written again from the internal structures of libssh2 it gets truncated
+ to the point where the first unknown key was located.
+
+ * src/knownhost.c:libssh2_knownhost_readfile() - return error if key
+ parsing fails
+
+- AUTHORS: synced with 42fec44c8a4
+
+ 31 recent authors added
+
+- [Dave Hayden brought this change]
+
+ compression: add support for zlib@openssh.com
+
+ Add a "use_in_auth" flag to the LIBSSH2_COMP_METHOD struct and a
+ separate "zlib@openssh.com" method, along with checking session->state
+ for LIBSSH2_STATE_AUTHENTICATED. Appears to work on the OpenSSH servers
+ I've tried against, and it should work as before with normal zlib
+ compression.
+
+- [Dmitry Smirnov brought this change]
+
+ configure: gcrypt doesn't come with pkg-config support
+
+ ... so use plain old -lgcrypt to the linker to link with it.
+
+ Fixes #225
+
+- sftp_read: Value stored to 'next' is never read
+
+ Detected by clang-analyzer
+
+- publickey_init: errors are negative, fix check
+
+ Detected by clang-analyzer.
+
+- [Maxime Larocque brought this change]
+
+ session_free: wrong variable used for keeping state
+
+ If libssh2_session_free is called without the channel being freed
+ previously by libssh2_channel_free a memory leak could occur.
+
+ A mismatch of state variables in session_free() prevents the call to
+ libssh2_channel_free function. session->state member is used instead of
+ session->free_state.
+ + It causes a leak of around 600 bytes on every connection on my systems + (Linux, x64 and PPC). + + (Debugging done under contract for Accedian Networks) + + Fixes #246 + +Guenter Knauf (29 Jun 2012) +- Small NetWare makefile tweak. + +- Some small Win32 makefile fixes. + +Daniel Stenberg (19 Jun 2012) +- libssh2_userauth_publickey_fromfile_ex.3: mention publickey == NULL + +- comp_method_zlib_decomp: handle Z_BUF_ERROR when inflating + + When using libssh2 to perform an SFTP file transfer from the "JSCAPE MFT + Server" (http://www.jscape.com) the transfer failed. The default JSCAPE + configuration is to enforce zlib compression on SSH2 sessions so the + session was compressed. The relevant part of the debug trace contained: + + [libssh2] 1.052750 Transport: unhandled zlib error -5 + [libssh2] 1.052750 Failure Event: -29 - decompression failure + + The trace comes from comp_method_zlib_decomp() in comp.c. The "unhandled + zlib error -5" is the status returned from the zlib function + inflate(). The -5 status corresponds to "Z_BUF_ERROR". + + The inflate() function takes a pointer to a z_stream structure and + "inflates" (decompresses) as much as it can. The relevant fields of the + z_stream structure are: + + next_in - pointer to the input buffer containing compressed data + avail_in - the number of bytes available at next_in + next_out - pointer to the output buffer to be filled with uncompressed + data + avail_out - how much space available at next_out + + To decompress data you set up a z_stream struct with the relevant fields + filled in and pass it to inflate(). On return the fields will have been + updated so next_in and avail_in show how much compressed data is yet to + be processed and next_out and avail_out show how much space is left in + the output buffer. + + If the supplied output buffer is too small then on return there will be + compressed data yet to be processed (avail_in != 0) and inflate() will + return Z_OK. 
In this case the output buffer must be grown, avail_out + updated and inflate() called again. + + If the supplied output buffer was big enough then on return the + compressed data will have been exhausted (avail_in == 0) and inflate() + will return Z_OK, so the data has all been uncompressed. + + There is a corner case where inflate() makes no progress. That is, there + may be unprocessed compressed data and space available in the output + buffer and yet the function does nothing. In this case inflate() will + return Z_BUF_ERROR. From the zlib documentation and the source code it + is not clear under what circumstances this happens. It could be that it + needs to write multiple bytes (all in one go) from its internal state to + the output buffer before processing the next chunk of input but but + can't because there is not enough space (though my guesses as to the + cause are not really relevant). Recovery from Z_BUF_ERROR is pretty + simple - just grow the output buffer, update avail_out and call + inflate() again. + + The comp_method_zlib_decomp() function does not handle the case when + inflate() returns Z_BUF_ERROR. It treats it as a non-recoverable error + and basically aborts the session. + + Fixes #240 + +Guenter Knauf (12 Jun 2012) +- MinGW makefile tweaks. + + Use GNU tools when compiling on Linux. + Fixed dist and dev targets. + +- NetWare makefile tweaks. + + Changed to use Windows commandline tools instead of + GNU tools when compiling on Windows. Fixed dist and + dev targets. Enabled nlmconv error for unresolved + symbols. + +Daniel Stenberg (11 Jun 2012) +- Revert "config.rpath: generated file, no need to keep in git" + + This reverts commit 1ac7bd09cc685755577fb2c8829adcd081e7ab3c. + + This file still used by lib/*m4 functions so we need to keep the file + around. + +- BINDINGS: added PySsh2, a Python-ctypes binding + +Guenter Knauf (8 Jun 2012) +- Fixed MinGW debug build. 
+ +Daniel Stenberg (5 Jun 2012) +- BINDINGS: Added the Cocoa/Objective-C one + + ... and sorted the bindings after the languages, alphabetically + + Reported by: Mike Abdullah + +- BINDINGS: document the bindings we know of + +Guenter Knauf (4 Jun 2012) +- Fixed LIBSSH2_INT64_T_FORMAT macro. + + Usually a format macro should hold the whole format, otherwise + it should be named a prefix. Also fixed usage of this macro in + scp.c for a signed var where it was used as prefix for unsigned. + +- Removed obsolete define from makefiles. + +- Renamed NetWare makefiles. + +- Renamed NetWare makefiles. + +- Synced MinGW makefiles with 56c64a6..39e438f. + + Also synced MinGW test makefile with b092696..f8cb874. + +Peter Stuge (30 May 2012) +- Revert "sftp: Don't send attrs.permissions on read-only SSH_FXP_OPEN" + + This reverts commit 04e79e0c798674a0796be8a55f63dd92e6877790. + +- sftp: Don't send attrs.permissions on read-only SSH_FXP_OPEN + + This works around a protocol violation in the ProFTPD 1.3.4 mod_sftp + server, as reported by Will Cosgrove in: + + http://libssh2.org/mail/libssh2-devel-archive-2012-05/0079.shtml + + Based on a suggested fix by TJ Saunders in: + + http://libssh2.org/mail/libssh2-devel-archive-2012-05/0104.shtml + +Guenter Knauf (28 May 2012) +- Try to detect OpenSSL build type automatically. + + Also fixed recently added libgdi32 linkage which is only + required when OpenSSL libs are linked statically. + +Daniel Stenberg (25 May 2012) +- config.rpath: generated file, no need to keep in git + +Guenter Knauf (22 May 2012) +- Updated dependency libary versions. + +Daniel Stenberg (18 May 2012) +- 1.4.3: towards the future + +Version 1.4.2 (18 May 2012) + +Daniel Stenberg (18 May 2012) +- RELEASE-NOTES: synced with 92a9f952794 + +Alexander Lamaison (15 May 2012) +- win32/libssh2_config.h: Remove hardcoded #define LIBSSH2_HAVE_ZLIB. 
+ + Rationale: Everything else in this file states a fact about the win32 + platform that is unconditional for that platform. There is nothing + unconditional about the presence of zlib. It is neither included with + Windows nor with the platform SDK. Therefore, this is not an appropriate + place to assert its presence. Especially as, once asserted, it cannot be + overridden using a compiler flag. + + In contrast, if it is omitted, then it can easily be reasserted by adding + a compiler flag defining LIBSSH2_HAVE_ZLIB. + +Daniel Stenberg (14 May 2012) +- RELEASE-NOTES: synced with 69a3354467c + +- _libssh2_packet_add: SSH_MSG_CHANNEL_REQUEST default to want_reply + + RFC4254 says the default 'want_reply' is TRUE but the code defaulted to + FALSE. Now changed. + + Fixes #233 + +- gettimeofday: no need for a replacement under cygwin + + Fixes #224 + +Alexander Lamaison (13 May 2012) +- Prevent sftp_packet_read accessing freed memory. + + sftp_packet_add takes ownership of the packet passed to it and (now that we + handle zombies) might free the packet. sftp_packet_read uses the packet type + byte as its return code but by this point sftp_packet_add might have freed + it. This change fixes the problem by caching the packet type before calling + sftp_packet_add. + + I don't understand why sftp_packet_read uses the packet type as its return + code. A future change might get rid of this entirely. + +Daniel Stenberg (12 May 2012) +- sftp_packet_flush: flush zombies too + + As this function is called when the SFTP session is closed, it needs to + also kill all zombies left in the SFTP session to avoid leaking memory + just in case some zombie would still be in there. + +- sftp_packetlist_flush: zombies must not have responses already + + When flushing the packetlist, we must only add the request as a zombie + if no response has already been received. 
Otherwise we could wrongly + make it a zombie even though the response was already received and then + we'd get a zombie stuck there "forever"... + +- sftp_read: on EOF remove packet before flush + + Since the sftp_packetlist_flush() function will move all the existing + FXP_READ requests in this handle to the zombie list we must first remove + this just received packet as it is clearly not a zombie. + +- sftp_packet_require: sftp_packet_read() returning 0 is not an error + + Exactly as the comment in the code said, checking the return code from + sftp_packet_read() with <= was wrong and it should be < 0. With the new + filtering on incoming packets that are "zombies" we can now see this + getting zero returned. + +- sftp_packetlist_flush: only make it zombie if it was sent + + The list of outgoing packets may also contain packets that never were + sent off and we better not make them zombies too. + +- [Alexander Lamaison brought this change] + + Mark outstanding read requests after EOF as zombies. + + In order to be fast, sftp_read sends many read requests at once. With a small + file, this can mean that when EOF is received back, many of these requests are + still outstanding. Responses arriving after we close the file and abandon the + file handle are queued in the SFTP packet queue and never collected. This + causes transfer speed to drop as a progressively longer queue must be searched + for every packet. + + This change introduces a zombie request-ID list in the SFTP session that is + used to recognise these outstanding requests and prevent them being added to + the queue. + +Peter Stuge (23 Apr 2012) +- [Rafael Kitover brought this change] + + Update win32/GNUmakefile to use OpenSSL 1.0.1a + + libcrypto on win32 now depends on gdi32.dll, so move the OpenSSL LDLIBS + block to before the compiler definitions, so that libcrypto gets added + first, and then add -lgdi32 into the following common LDLIBS for gcc. 
+ +Guenter Knauf (23 Apr 2012) +- Changed 'Requires' to 'Requires.private'. + + Only static builds need to link against the crypto libs. + +- Fixed 'Requires:' names. + + The 'Requires:' line lists the names of the .pc files. + +- Added 'Requires:' line to libssh2.pc. + + This is necessary so that other libs which lookup libssh2 info + via pkg-config can add the right crypto lib dependencies. + +- Updated dependency lib versions. + +Peter Stuge (18 Apr 2012) +- configure.ac: Add option to disable build of the example applications + + Examples are built by default. Any of the following options on the + configure command line will skip building them: + + --disable-examples-build + --enable-examples-build=no + --enable-examples-build=false + +- userauth.c: fread() from public key file to correctly detect any errors + + If the filename parameter for file_read_publickey() was the name of a + directory instead of a file then libssh2 would spin trying to fgetc() + from the FILE * for the opened directory when trying to determine the + length of the encoded public key, since fgetc() can't report errors. + + Use fread() instead to correctly detect this error condition along + with many others. + + This fixes the problem reported in + http://www.libssh2.org/mail/libssh2-devel-archive-2012-04/0021.shtml + + Reported-by: Oleksiy Zagorskyi + +- Return LIBSSH2_ERROR_SOCKET_DISCONNECT on EOF when reading banner + +Guenter Knauf (17 Apr 2012) +- Fixed copyright year. + +- Updated dependency lib versions in static makefiles. + +Daniel Stenberg (6 Apr 2012) +- version: bump to 1.4.2 + + We're on the 1.4.2 track now (at least) + +Version 1.4.1 (4 Apr 2012) + +Daniel Stenberg (4 Apr 2012) +- RELEASE-NOTES: updated for 1.4.1 release + +- always do "forced" window updates + + When calling _libssh2_channel_receive_window_adjust() internally, we now + always use the 'force' option to prevent libssh2 to avoid sending the + update if the update isn't big enough. 
+
+ It isn't fully analyzed but we have seen corner cases which made a
+ necessary window update not get sent due to this and then the other side
+ doesn't send data and our side then sits waiting forever.
+
+- channel_read: force window adjusts!
+
+ if there's not enough room to receive the data that's being requested,
+ the window adjustment needs to be sent to the remote and thus the force
+ option has to be used. _libssh2_channel_receive_window_adjust() would
+ otherwise "queue" small window adjustments for a later packet but that
+ is really terrible for the small buffer read that for example is the
+ final little piece of a very large file as then there is no logical next
+ packet!
+
+ Reported by: Armen Babakhanian
+ Bug: http://www.libssh2.org/mail/libssh2-devel-archive-2012-03/0130.shtml
+
+- [Paul Howarth brought this change]
+
+ aes: the init function fails when OpenSSL has AES support
+
+ The internal init function only worked fine when the configure script
+ didn't detect the OpenSSL AES_CTR function!
+
+ Bug: http://www.libssh2.org/mail/libssh2-devel-archive-2012-03/0111.shtml
+ Reported by: Paul Howarth
+
+- [Matthew Booth brought this change]
+
+ transport_send: Finish in-progress key exchange before sending data
+
+ _libssh2_channel_write() first reads outstanding packets before writing
+ new data. If it reads a key exchange request, it will immediately start
+ key re-exchange, which will require sending a response. If the output
+ socket is full, this will result in a return from
+ _libssh2_transport_read() of LIBSSH2_ERROR_EAGAIN. In order not to block
+ a write because there is no data to read, this error is explicitly
+ ignored and the code continues marshalling a packet for sending. When it
+ is sent, the remote end immediately drops the connection because it was
+ expecting a continuation of the key exchange, but got a data packet.
+ + This change adds the same check for key exchange to + _libssh2_transport_send() that is in _libssh2_transport_read(). This + ensures that key exchange is completed before any data packet is sent. + +- channel_write: acknowledge transport errors + + When draining data off the socket with _libssh2_transport_read() (which + in turn has to be done so that we can be sure to have read any possible + window-increasing packets), this code previously ignored errors which + could lead to nasty loops. Now all error codes except EAGAIN will cause + the error to be returned at once. + + Bug: http://www.libssh2.org/mail/libssh2-devel-archive-2012-03/0068.shtml + Reported by: Matthew Booth + +- [Steven Dake brought this change] + + In examples/x11.c, Make sure sizeof passed to read operation is correct + + sizeof(buf) expands to 8 or 4 (since its a pointer). This variable may + have been static in the past, leading to this error. + + Signed-off-by: Steven Dake + +- [Steven Dake brought this change] + + Fix suspicious sizeof usage in examples/x11.c + + In the x11 example, sizeof(buf) = 8UL (on x86_64), when this should + probably represent the buffer size available. I am not sure how to + test that this change is actually correct, however. + + Signed-off-by: Steven Dake + +- sftp_packet_read: follow-up fix for EAGAIN/window adjust + + The commit in 7194a9bd7ba45 wasn't complete. This change makes sure + variables are initialized properly before used in the EAGAIN and window + adjust cases. + +- sftp_packet_add: use named error code instead of number + +- sftp_packet_add: verify the packet before accepting it + + In order to bail out as quickly as possible when things are wrong and + out of sync, make sure the SFTP message is one we understand. 
+ +- SFTP: preserve the original error code more + + Lots of places in the code translated the original error into the more + generic LIBSSH2_ERROR_SOCKET_TIMEOUT but this turns out to distort the + original error reason a lot and makes tracking down the real origin of a + problem really hard. This change makes the original error code be + preserved to a larger extent when returned up to the parent function. + +- sftp_packet_read: adjust window size as necessary + + Commit 03ca9020756 tried to simplify the window sizing logic but broke + SFTP readdir as there was no window sizing code left there so large + directory listings no longer worked. + + This change introduces window sizing logic to the sftp_packet_read() + function so that it now tells the remote about the local side having a + window size that suffices when it is about to ask for directory data. + + Bug: http://www.libssh2.org/mail/libssh2-devel-archive-2012-03/0069.shtml + Reported by: Eric + +- [Steven Dake brought this change] + + Tell C compiler we don't care about return code of libssh2_init + + The call of libssh2_init returns a return code, but nothing could be done + within the _libssh2_init_if_needed execution path. + + Signed-off-by: Steven Dake + +- [Steven Dake brought this change] + + Add comment indicating a resource leak is not really a resource leak + + While possibly obvious to those investigating the code, coverity complains + about this out of scope leak. + + Signed-off-by: Steven Dake + +- [Steven Dake brought this change] + + Use safer snprintf rather than sprintf in scp_send() + + Signed-off-by: Steven Dake + +- [Steven Dake brought this change] + + Use safer snprintf rather than sprintf in scp_recv() + + While the buffer is indeed allocated to a safe length, better safe than sorry. 
+ + Signed-off-by: Steven Dake + +- [Steven Dake brought this change] + + use snprintf in knownhost_writeline() rather than sprintf + + Although the function checks the length, if the code was in error, there + could potentially be a buffer overrun with the use of sprintf. Instead replace + with snprintf. + + Signed-off-by: Steven Dake + +- [Steven Dake brought this change] + + Add tracing to print packets left on session at libssh2_session_free + + Signed-off-by: Steven Dake + +Peter Stuge (2 Mar 2012) +- Define and use LIBSSH2_INVALID_SOCKET instead of INVALID_SOCKET + + INVALID_SOCKET is a special value in Windows representing a + non-valid socket identifier. We were #defining this to -1 on + non-Windows platforms, causing unnecessary namespace pollution. + Let's have our own identifier instead. + + Thanks to Matt Lawson for pointing this out. + +- nw/Makefile.netware: Fix project name typo to avoid needless confusion + +- example/x11: Set raw terminal mode manually instead of with cfmakeraw() + + OpenSolaris has no cfmakeraw() so to make the example more portable + we simply do the equivalent operations on struct termios ourselves. + + Thanks to Tom Weber for reporting this problem, and finding a solution. + +Daniel Stenberg (17 Feb 2012) +- sftp_write: cannot return acked data *and* EAGAIN + + Whenever we have acked data and are about to call a function that *MAY* + return EAGAIN we must return the number now and wait to get called + again. Our API only allows data *or* EAGAIN and we must never try to get + both. + +Peter Stuge (13 Feb 2012) +- example/x11: Build only when sys/un.h is found by configure + + The example can't be built on systems without AF_UNIX sockets. + +Daniel Stenberg (10 Feb 2012) +- [Alexander Lamaison brought this change] + + Simplified sftp_read. + + Removed the total_read variable that originally must have tracked how + much data had been written to the buffer. 
With non-blocking reads, we + must return straight away once we have read data into the buffer so this + variable served no purpose. + + I think it was still hanging around in case the initial processing of + 'leftover' data meant we wrote to the buffer but this case, like the + others, must return immediately. Now that it does, the last remaining + need for the variable is gone. + +- [Alexander Lamaison brought this change] + + Cleaned up sftp_read and added more explanation. + + Replaced the gotos which were implementing the state machine with + a switch statement which makes the states more explicit. + +- sftp_read: avoid data *and* EAGAIN + + Whenever we have data and are about to call a function that *MAY* return + EAGAIN we must return the data now and wait to get called again. Our API + only allows data *or* EAGAIN and we must never try to get both. + +Peter Stuge (2 Feb 2012) +- Add a tcpip-forward example which demonstrates remote port forwarding + +- libssh2.h: Add missing prototype for libssh2_session_banner_set() + +- example/subsystem_netconf.c: Return error when read buffer is too small + + Also remove a little redundancy in the read loop condition. + +- example/subsystem_netconf.c: Add a missing newline in an error message + +- Fix undefined reference to _libssh_error in libgcrypt backend + + Commit 209de22299b4b58e582891dfba70f57e1e0492db introduced a function + call to a non-existing function, and since then the libgcrypt backend + has not been buildable. 
+ +Version 1.4.0 (31 Jan 2012) + +Daniel Stenberg (31 Jan 2012) +- RELEASE-NOTES: synced with 6bd584d29 for 1.4.0 + +- s/1.3.1/1.4.0 + + We're bumping the minor number + +- [Jernej Kovacic brought this change] + + libssh2_session_supported_algs: fix compiler warning + +- [Jernej Kovacic brought this change] + + session_supported_algs docs: added an example + +- [Gellule Xg brought this change] + + sftp-seek: clear EOF flag + + Set the EOF flag to False when calling seek64 to be able to get some + data back on a following read + +- [Peter Krempa brought this change] + + userauth: Provide more information if ssh pub key extraction fails + + If the function that extracts/computes the public key from a private key + fails the errors it reports were masked by the function calling it. This + patch modifies the key extraction function to return errors using + _libssh_error() function. The error messages are tweaked to contain + reference to the failed operation in addition to the reason. + + * AUTHORS: - add my name + * libgcrypt.c: _libssh2_pub_priv_keyfile(): - return a more verbose + error using + _libssh2_error() func. + * openssl.c: - modify call graph of _libssh2_pub_priv_keyfile() to use + _libssh2_error for error reporting(); + * userauth.c: - tweak functions calling _libssh2_pub_priv_keyfile() not + to shadow error messages + +- TODO: remove issues we (sort of) did already + +- ssh2_exec: skip error outputs for EAGAIN + + Since the example uses non-blocking mode, it will just flood the output + with this "nonsense" error. + +Guenter Knauf (30 Nov 2011) +- Some NetWare makefile tweaks. + +Daniel Stenberg (18 Nov 2011) +- LIBSSH2_SFTP_PACKET_MAXLEN: increase to 80000 + + Some SFTP servers send SFTP packets larger than 40000. Since the limit + is only present to avoid insane sizes anyway, we can easily bump it. + + The define was formerly in the public header libssh2_sftp.h but served + no external purpose and was moved into the source dir. 
+ + Bug: http://www.libssh2.org/mail/libssh2-devel-archive-2011-11/0004.shtml + Reported by: Michael Harris + +Alexander Lamaison (18 Nov 2011) +- [Peter Krempa brought this change] + + knownhost_check(): Don't dereference ext if NULL is passed + + Documentation for libssh2_knownhost_checkp() and related functions + states that the last argument is filled with data if non-NULL. + + "knownhost if set to non-NULL, it must be a pointer to a 'struct + libssh2_knownhost' pointer that gets filled in to point to info about a + known host that matches or partially matches." + + In this function ext is dereferenced even if set to NULL, causing + segfault in applications not needing the extra data. + +Daniel Stenberg (11 Nov 2011) +- [Peter Krempa brought this change] + + knownhost_add: Avoid dereferencing uninitialized memory on error path. + + In function knownhost_add, memory is allocated for a new entry. If normal + allocation is used, memory is not initialized to 0 right after, but a + check is done to verify if correct key type is passed. This test is done + BEFORE setting the memory to null, and on the error path function + free_host() is called, that tries to dereference uninitialized memory, + resulting in a glibc abort(). + + * knownhost.c - knownhost_add(): - move typemask check before alloc + +- windows build: add define to avoid compiler warning + + A recent mingw compiler has started to complain on "#warning Please + include winsock2.h before windows.h" unless the magic define is set + first. + + Reported by: Vincent Torri + Bug: http://www.libssh2.org/mail/libssh2-devel-archive-2011-10/0064.shtml + +Henrik Nordstrom (31 Oct 2011) +- [Vincent Torri brought this change] + + Correct Windows include file name case, simplifying cross-compilation + + When cross compiling to Windows, libssh2.h includes Windows header files + with upper case filenames : BaseTsd.h and WinSock2.h. + + These files have lowercase names with mingw-w64 (iirc, it's the same with + mingw). 
And as on Windows, being lowercase or uppercase does not matter. + +Daniel Stenberg (25 Oct 2011) +- [Jernej Kovacic brought this change] + + libssh2_session_supported_algs: added + +- [Kamil Dudka brought this change] + + example/sftp_RW_nonblock: do not ignore LIBSSH2_ERROR_EAGAIN + + Bug: https://bugzilla.redhat.com/745420 + +Peter Stuge (5 Oct 2011) +- example/ssh2_agent: Print host key fingerprint before authentication + + Also moves the comment about not being authenticated to before the + agent authentication takes place, so that it better matches the code. + +Daniel Stenberg (29 Sep 2011) +- OpenSSL EVP: fix threaded use of structs + + Make sure we don't clear or reset static structs after first init so + that they work fine even when used from multiple threads. Init the + structs in the global init. + + Help and assistance by: John Engstrom + + Fixes #229 (again) + +- openssl: don't init static structs differently + + make_ctr_evp() is changed to take a struct pointer, and then each + _libssh2_EVP_aes_[keylen]_ctr function is made to pass in their own + static struct + + Reported by: John Engstrom + Fixes #229 + +Guenter Knauf (27 Sep 2011) +- Removed obsolete include path. + +Daniel Stenberg (21 Sep 2011) +- read_state: clear the state variable better + + Set read_state back to idle before trying to send anything so that if + the state somehow is wrongly set. + + Also, avoid such a case of confusion by resetting the read_state when an + sftp handle is closed. + +- sftp_read: remove leftover fprintf + + Reported by: Alexander Lamaison + +- sftp.h: fix the #ifdef to prevent multiple inclusions + +- sftp_read: use a state variable to avoid bad writes + + When a channel_write call has gotten an EAGAIN back, we try harder to + continue the same write in the subsequent invoke. 
+ +- window_size: explicit adjustments only + + Removed the automatic window_size adjustments from + _libssh2_channel_read() and instead all channel readers must now make + sure to enlarge the window sizes properly themselves. + + libssh2_channel_read_ex() - the public function, now grows the window + size according to the requested buffer size. Applications can still opt + to grow the window more on demand. Larger windows tend to give higher + performance. + + sftp_read() now uses the read-ahead logic to figure out a window_size. + +- libssh2.h: bump the default window size to 256K + +- libssh2_userauth_keyboard_interactive.3: fix man warning + + It seemed to occur due to the excessive line length + +- [Mikhail Gusarov brought this change] + + Add missing .gitignore entries + +- [Mikhail Gusarov brought this change] + + Add manpage syntax checker to 'check' target + + In virtually every libssh2 release Debian's lintian catches syntax errors in + manpages. Prevent it by checking manpages as a part of testsuite. + +- libssh2_banner_set.3: fix nroff syntax mistake + +Guenter Knauf (10 Sep 2011) +- Use predefined resource compiler macro. + +- Added casts to silent compiler warnings. + +- Fixed uint64_t printf. + +- Fixed macro function signatures. + +- NetWare makefile tweaks. + +- Removed unused var. + +- Added 2 samples not mentioned. + +- Dont build x11 sample with MinGW. + +- Fixed executable file description. + +- Removed unused var. + +- Kill stupid gcc 3.x uninitialized warning. + +- Build all examples. + +- More MinGW makefile tweaks. + + Renamed *.mingw makefiles to GNUmakefile since GNU make picks these + up automatically, and therefore win32/Makefile removed. + +- Removed forgotten WINSOCK_VERSION defines. + +Daniel Stenberg (9 Sep 2011) +- libssh2_session_startup(3) => libssh2_session_handshake(3) + + Propagate for the current function in docs and examples. + libssh2_session_startup() is deprecated. 
+ +- libssh2_banner_set => libssh2_session_banner_get + + Marked the old function as deprecated. Added the new name in the correct + name space with the same arguments and functionality. + +- new function: libssh2_session_banner_get + + Returns the banner from the server handshake + + Fixes #226 + +- libssh2.h: bump version to 1.4.0 for new function(s) + +- remove embedded CVS/svn tags + +- [liuzl brought this change] + + API add:libssh2_sftp_get_channel + + Return the channel of sftp, then caller can + control the channel's behavior. + + Signed-off-by: liuzl + +- _libssh2_channel_read: react on errors from receive_window_adjust + + Previously the function would ignore all errors except for EAGAIN. + +- sftp_read: extend and clarify the documentation + +- sftp_read: cap the read ahead maximum amount + + Now we only go up to LIBSSH2_CHANNEL_WINDOW_DEFAULT*30 bytes SFTP read + ahead, which currently equals 64K*30 == 1966080 bytes. + +- _libssh2_channel_read: fix non-blocking window adjusting + + If EAGAIN is returned when adjusting the receive window, we must not + read from the transport directly until we've finished the adjusting. + +Guenter Knauf (8 Sep 2011) +- Fix for systems which need sys/select.h. + +- The files were not gone but renamed ... + +Daniel Stenberg (6 Sep 2011) +- sftp_read: added documenting comment + + Taken from some recent email conversations I added some descriptions of + the logic in sftp_read() to aid readers. + +- 1.3.1: start the work + +Version 1.3.0 (6 Sep 2011) + +Daniel Stenberg (6 Sep 2011) +- Makefile.am: the Makefile.win32 files are gone + +- RELEASE-NOTES: updated for 1.3.0 + +- sftp_read: a short read is not end of file + + A returned READ packet that is short will now only reduce the + offset. + + This is a temporary fix as it is slightly better than the previous + approach but still not very good. 
+ +- [liuzl brought this change] + + _libssh2_packet_add: adjust window size when truncating + + When receiving more data than what the window size allows on a + particular channel, make sure that the window size is adjusted in that + case too. Previously it would only adjust the window in the non-error + case. + +Guenter Knauf (29 Aug 2011) +- Silent compiler warning with MinGW64. + +- Fixed link to native Win32 awk tool. + +- Renamed MinGW makefiles. + +- Some MinGW makefile tweaks. + + Enable build without GNU tools and with MinGW64 compiler. + +- Fixed aes_ctr_do_cipher() signature. + +Daniel Stenberg (26 Aug 2011) +- [liuzl brought this change] + + libssh2_sftp_seek64: flush packetlist and buffered data + + When seeking to a new position, flush the packetlist and buffered data + to prevent already received or pending data to wrongly get used when + sftp-reading from the new offset within the file. + +- sftp_read: advance offset correctly for buffered copies + + In the case where a read packet has been received from the server, but + the entire contents couldn't be copied to the user-buffer, the data is + instead buffered and copied to the user's buffer in the next invocation + of sftp_read(). When that "extra" copy is made, the 'offset' pointer was + not advanced accordingly. + + The biggest impact of this flaw was that the 'already' variable at the + top of the function that figures out how much data "ahead" that has + already been asked for would slowly go more and more out of sync, which + could lead to the file not being read all the way to the end. + + This problem was most noticable in cases where the application would + only try to read the exact file size amount, like curl does. In the + examples libssh2 provides the sftp read function is most often called + with a fixed size large buffer and then the bug would not appear as + easily. + + This bug was introduced in the SFTP rewrite in 1.2.8. 
+ + Bug: http://curl.haxx.se/mail/lib-2011-08/0305.html + http://www.libssh2.org/mail/libssh2-devel-archive-2011-08/0085.shtml + +- wrap some long lines < 80 columns + +- LIBSSH2_RECV: fix typo, use the RECV_FD macro + +- subsystem_netconf.c: fix compiler warnings + +- [Henrik Nordstrom brought this change] + + Custom callbacks for performing low level socket I/O + +- version bump: start working towards 1.3.0 + +Version 1.2.9 (16 Aug 2011) + +Daniel Stenberg (16 Aug 2011) +- RELEASE-NOTES: synced with 95d69d3a81261 + +- [Henrik Nordstrom brought this change] + + Document prototypes for macro defined functions + +- [Henrik Nordstrom brought this change] + + Avoid reuse after free when closing X11 channels + +- _libssh2_channel_write: handle window_size == 0 better + + When about to send data on the channel and the window size is 0, we must + not just return 0 if the transport_read() function returned EAGAIN as it + then causes a busy-loop. + + Bug: http://libssh2.org/mail/libssh2-devel-archive-2011-08/0011.shtml + +- gettimeofday: fix name space pollution + + For systems without its own gettimeofday() implementation, we still must + not provide one outside our namespace. + + Reported by: Bill Segall + +Dan Fandrich (5 Aug 2011) +- libssh2.pc.in: Fixed spelling in pkgconfig file + +Peter Stuge (17 Jul 2011) +- example/subsystem_netconf.c: Add missing #include + +- example/subsystem_netconf.c: Discard ]]>]]> and return only XML response + +- example/subsystem_netconf.c: Fix uninitialized variable bug + +- example: Add subsystem_netconf.c + + This example demonstrates how to use libssh2 to send a request to + the NETCONF subsystem available e.g. in JunOS. + + See also http://tools.ietf.org/html/draft-ietf-netconf-ssh-06 + +Daniel Stenberg (16 Jul 2011) +- man page cleanups: non-existing functions need no man pages + +- libssh2_new_host_entry.3: removed + + This is just junk leftovers. 
+ +- userauth_keyboard_interactive: fix buffer overflow + + Partly reverse 566894494b4972ae12 which was simplifying the code far too + much and ended up overflowing a buffer within the LIBSSH2_SESSION + struct. Back to allocating the buffer properly like it used to do. + + Bug: http://www.libssh2.org/mail/libssh2-devel-archive-2011-06/0032.shtml + Reported by: Alfred Gebert + +- keyboard-interactive man page: cleaned up + +- [Alfred Gebert brought this change] + + _libssh2_recv(): handle ENOENT error as EAGAIN + + A sftp session failed with error "failure establishing ssh session" on + Solaris and HP-UX. Sometimes the first recv() function call sets errno + to ENOENT. In the man pages for recv of Solaris and HP-UX the error + ENOENT is not documented. + + I tested Solaris SPARC and x86, HP-UX i64, AIX, Windows and Linux. + +- agent_list_identities: fix out of scope access + + An auto variable out of scope was being referenced and used. + + fixes #220 + +- _libssh2_wait_socket: fix timeouts for poll() uses + +- windows: inclusion fix + + include winsock2.h for all windows compilers + +- keyb-interactive: add the fixed buffer + + Belongs to commit 5668944 + +- code cleanup: don't use C99/c++ comments + + We aim for C89 compliance + +- keyb-interactive: allow zero length fields + + Allow zero length fields so they don't cause malloc(0) calls + + Avoid free()ing NULL pointers + + Avoid a malloc of a fixed 5 byte buffer. + +- libssh2_channel_process_startup.3: clean up + + Remove the references to the macro-fied shortcuts as they have their own + individual man pages. + + Made the prototype different and more readable. + +- man page: fix .BR lines + + We don't use \fI etc on .BR lines + +- userauth_keyboard_interactive: skip code on zero length auth + +- libssh2_channel_forward_accept.3: mention how to get error + + Since this returns a pointer, libssh2_session_last_errno() must be used + to get the actual error code and it wasn't that clear before. 
+ +- timeout docs: mention they're added in 1.2.9 + +- sftp_write_sliding.c: indent fix + + Use the standard indenting and removed CVS leftover comment + +- [zl liu brought this change] + + sftp_write_sliding: send the complete file + + When reaching the end of file there can still be data left not sent. + +- [Douglas Masterson brought this change] + + session_startup: init state properly + + libssh2_session_startup() didn't set the state correctly so it could get + confused. + + Fixes #218 + +- timeout: added man pages + +- BLOCK_ADJUST_ERRNO: move rc to right level + + We can't declare the variable within the block and use it in the final + do-while() expression to be properly portable C89. + +- [Matt Lilley brought this change] + + adds a timeout to blocking calls + + Fixes bug #160 as per Daniel's suggestion + + Adds libssh2_session_set_timeout() and libssh2_session_get_timeout() + +- SCP: fix incorrect error code + + After an error occurs in libssh2_scp_recv() or libssh2_scp_send(), the + function libssh2_session_last_error() would return + LIBSSH2_ERROR_SOCKET_NONE on error. + + Bug: http://trac.libssh2.org/ticket/216 + Patch by: "littlesavage" + + Fixes #216 + +Guenter Knauf (19 Apr 2011) +- Updated default (recommended) dependency versions. + +Daniel Stenberg (17 Apr 2011) +- libssh2_session_block_directions: fix mistake + + The last LIBSSH2_SESSION_BLOCK_INBOUND should be + LIBSSH2_SESSION_BLOCK_OUTBOUND + + And I shortened the short description + + Reported by: "drswinghead" + +- msvcproj: added libs and debug stuff + + Added libraries needed to link whether using openssl dynamically or + statically + + Added LIBSSH2DEBUG define to debug versions to enable tracing + + URL: http://trac.libssh2.org/ticket/215 + Patch by: Mark Smith + +- sftp_write: clean offsets on error + + When an error has occurred on FXP_WRITE, we must make sure that the + offset, sent offset and acked counter are reset properly. 
+ +- example/.gitignore: ignore built binaries + +- sftp_write: flush the packetlist on error + + When an error occurs during write, flush the entire list of pending + outgoing SFTP packets. + +- keepalive: add first basic man pages + + Someone on IRC pointed out that we don't have these documented so I + wrote up a first set based on the information in the wiki: + http://trac.libssh2.org/wiki/KeepAlive + +- scp_write_nonblock.c: remove pointless check + + libssh2_channel_write() cannot return a value that is larger than the + input length value + +Mikhail Gusarov (9 Apr 2011) +- s/\.NF/.nf/ to fix wrong macro name caught by man --warnings + +Daniel Stenberg (6 Apr 2011) +- version: bump to 1.2.9_dev + + Also update the copyright year range to include 2011 + +- configure: fix $VERSION + + Stop using the $VERSION variable as it seems to be magically used by + autoconfig itself and thus gets set to the value set in AC_INIT() + without us wanting that. $LIBSSH2VER is now the libssh2 version as + detected. + + Reported by: Paul Howarth + Bug: http://www.libssh2.org/mail/libssh2-devel-archive-2011-04/0008.shtml + +- maketgz: use git2news.pl by the correct name + +Version 1.2.8 (4 Apr 2011) + +Daniel Stenberg (4 Apr 2011) +- RELEASE-NOTES: synced with fabf1a45ee + +- NEWS: auto-generated from git + + Starting now, the NEWS file is generated from git using the git2news.pl + script. This makes it always accurate and up-to-date, even for daily + snapshots etc. + +- sftp_write: handle FXP_WRITE errors + + When an sftp server returns an error back on write, make sure the + function bails out and returns the proper error. + +- configure: stop using the deprecated AM_INIT_AUTOMAKE syntax + +Alexander Lamaison (13 Mar 2011) +- Support unlimited number of host names in a single line of the known_hosts file. + + Previously the code assumed either a single host name or a hostname,ip-address pair. 
However, according to the spec [1], there can be any number of comma separated host names or IP addresses. + + [1] http://www.openbsd.org/cgi-bin/man.cgi?query=sshd&sektion=8 + +Daniel Stenberg (26 Feb 2011) +- libssh2_knownhost_readfile.3: clarify return value + + This function returns the number of parsed hosts on success, not just + zero as previously documented. + +Peter Stuge (26 Feb 2011) +- Don't save allocated packet size until it has actually been allocated + + The allocated packet size is internal state which needs to match reality + in order to avoid problems. This commit fixes #211. + +Daniel Stenberg (21 Feb 2011) +- [Alfred Gebert brought this change] + + session_startup: manage server data before server identification + + Fix the bug that libssh2 could not connect if the sftp server + sends data before sending the version string. + + http://tools.ietf.org/html/rfc4253#section-4.2 + + "The server MAY send other lines of data before sending the version + string. Each line SHOULD be terminated by a Carriage Return and Line + Feed. Such lines MUST NOT begin with "SSH-", and SHOULD be encoded + in ISO-10646 UTF-8 [RFC3629] (language is not specified). Clients + MUST be able to process such lines." + +- [Alfred Gebert brought this change] + + fullpacket: decompression only after init + + The buffer for the decompression (remote.comp_abstract) is initialised + in time when it is needed. With this fix decompression is disabled when + the buffer (remote.comp_abstract) is not initialised. + + Bug: http://trac.libssh2.org/ticket/200 + +- _libssh2_channel_read: store last error + + When the transport layer returns EAGAIN this function didn't call + _libssh2_error() which made the last_error not get set. 
+ +- sftp_write: clarified the comment header + +- sftp_read: avoid wrapping counter to insanity + + As pointed out in bug #206, if a second invoke of libssh2_sftp_read() + would shrink the buffer size, libssh2 would go nuts and send out read + requests like crazy. This was due to an unsigned variable turning + "negative" by some wrong math, and that value would be the amount of + data attempt to pre-buffer! + + Bug: http://trac.libssh2.org/ticket/206 + +- sftp_packet_read: use 32bit variables for 32bit data + +- libssh2_sftp_stat_ex.3: cleaned up, extended + + Removed the macros from it as they have their own man pages. + + Added the LIBSSH2_SFTP_ATTRIBUTES struct in here for easier reference. + +- sftp_readdir: return error if buffer is too small + + If asked to read data into a buffer and the buffer is too small to hold + the data, this function now returns an error instead of as previously + just copy as much as fits. + +- sftp_symlink: return error if receive buffer too small + + and clean up some variable type mismatches + + Discussion: http://www.libssh2.org/mail/libssh2-devel-archive-2011-01/0001.shtml + +- docs: clarify what happens with a too small buffer + + This flaw is subject to change, but I figured it might be valuable to + users of existing code to know how it works. + +- channel_request_pty_size: fix reqPTY_state + + The state variable isn't properly set so every other call to the + function fails! + + Bug: http://libssh2.org/mail/libssh2-devel-archive-2010-12/0096.shtml + Reported by: Steve Legg + +- data size: cleanup + + Fix 64bit warnings by using (s)size_t and dedicated uint32_t types more. + +- [Pierre Joye brought this change] + + ssize_t: proper typedef with MSVC compilers + + As discussed on the mailing list, it was wrong for win64 and using the + VC-provided type is the safest approach instead of second- guessing + which one it should be. + +Guenter Knauf (22 Dec 2010) +- Updated OpenSSL version. + +- Expanded tabs to spaces. 
+ +Peter Stuge (21 Dec 2010) +- [Joey Degges brought this change] + + _libssh2_ntohu64: fix conversion from network bytes to uint64 + + Cast individual bytes to uint64 to avoid overflow in arithmetic. + +Daniel Stenberg (20 Dec 2010) +- libssh2_userauth_list: language fix + + "faily" is not a good English word, and I also cleaned up some other minor + mistakes + +- crypto: unify the generic functions + + Added crypto.h that is the unified header to include when using crypto + functionality. It should be the only header that needs to adapt to the + underlying crypto library in use. It provides the set of prototypes that + are library agnostic. + +- [Mark Smith brought this change] + + userauth: derive publickey from private + + Pass a NULL pointer for the publickey parameter of + libssh2_userauth_publickey_fromfile and + libssh2_userauth_hostbased_fromfile functions. In this case, the + functions recompute the public key from the private key file data. + + This is work done by Jean-Louis CHARTON + , then adapted by Mark Smith and + slightly edited further by me Daniel. + + WARNING: this does leave the feature NOT WORKING when libssh2 is built + to use libgcrypt instead of OpenSSL simply due to lack of + implementation. + +- ssh2_echo: Value stored to 'exitcode' is never read + +- _libssh2_packet_add: fix SSH_MSG_DEBUG weirdness + + I believe I may have caused this weird typo style error when I cleaned + up this function a while ago. Corrected now. + +- uint32: more longs converted to proper types + + I also moved the MAC struct over to the mac.h header file and made sure + that the users of that struct include that file. + +- SFTP: more types to uint32_t + + The 'num_names' field in the SSH_FXP_NAME response is an unsigned 32bit + value so we make sure to treat it like that. + +- SFTP: request_ids are uint32_t + + I went over the code and made sure we use uint32_t all over for the + request_id data. It is an unsigned 32bit value on the wire. 
+ +- SFTP: store request_id separately in packets + + By using a new separate struct for incoming SFTP packets and not sharing + the generic packet struct, we can get rid of an unused field and add a + new one dedicated for holding the request_id for the incoming + package. As sftp_packet_ask() is called fairly often, a "mere" integer + comparison is MUCH faster than the previous memcmp() of (typically) 5 + bytes. + +- libssh2_sftp_open_ex: man page extended and cleaned up + + I added the missing documentation for the 'flags' argument. + +- SFTP: unify the READ/WRITE chunk structs + +- SFTP: fix memory leaks + + Make sure that we cleanup remainders when the handle is closed and when + the subsystem is shutdown. + + Existing flaw: if a single handle sends packets that haven't been + replied to yet at the time when the handle is closed, those packets will + arrive later and end up in the generic packet brigade queue and they + will remain in there until flushed. They will use unnecessary memory, + make things slower and they will ruin the SFTP handling if the + request_id counter ever wraps (highly unlikely to every happen). + +- sftp_close_handle: packet list is generic + + Fix comment, simplify the loop logic + +- sftp_read: pipeline reads + + The SFTP read function now does transfers the same way the SFTP write + function was made to recently: it creates a list of many outgoing + FXP_READ packets that each asks for a small data chunk. The code then + tries to keep sending read request while collecting the acks for the + previous requests and returns the received data. 
+ +- sftp_write: removed unused variable + +- _libssh2_channel_close: don't call transport read if disconnected + + The loop that waits for remote.close to get set may end up looping + forever since session->socket_state gets set to + LIBSSH2_SOCKET_DISCONNECTED by the packet_add() function called from the + transport_read() function and after having been set to + LIBSSH2_SOCKET_DISCONNECTED, the transport_read() function will only + return 0. + + Bug: http://trac.libssh2.org/ticket/198 + +- libssh2_sftp_seek64: new man page + + Split off libssh2_sftp_seek64 from the libssh2_sftp_seek man page, and + mentioned that we consider the latter deprecated. Also added a mention + about the dangers of doing seek during writing or reading. + +- sftp_seek: fix + + The new SFTP write code caused a regression as the seek function no + longer worked as it didn't set the write position properly. + + It should be noted that seeking is STRONGLY PROHIBITED during upload, as + the upload magic uses two different offset positions and the multiple + outstanding packets etc make them sensitive to change in the midst of + operations. + + This functionality was just verified with the new example code + sftp_append. This bug was filed as bug #202: + + Bug: http://trac.libssh2.org/ticket/202 + +- sftp_append: new example doing SFTP append + +- MAX_SFTP_OUTGOING_SIZE: 30000 + + I ran SFTP upload tests against localhost. It showed that to make the + app reach really good speeds, I needed to do a little code tweak and + change MAX_SFTP_OUTGOING_SIZE from 4000 to 30000. The tests I did before + with the high latency tests didn't show any real difference whatever I + had that size set to. + + This number is the size in bytes that libssh2 cuts off the large input + buffer and sends off as an individual sftp packet. 
+ +- sftp_write_sliding.c: new example + + This is an example that is very similar to sftp_write_nonblock.c, with + the exception that this uses + + 1 - a larger upload buffer + + 2 - a sliding buffer mechanism to allow the app to keep sending lots of + data to libssh2 without having to first drain the buffer. + + These are two key issues to make libssh2 SFTP uploads really perform + well at this point in time. + +- cpp: s/#elsif/#elif + + This looks like a typo as #elsif is not really C... + +- _libssh2_channel_write: revert channel_write() use + + The attempts made to have _libssh2_channel_write() accept larger pieces + of data and split up the data by itself into 32700 byte chunks and pass + them on to channel_write() in a loop as a way to do faster operations on + larger data blocks was a failed attempt. + + The reason why it is difficult: + + The API only allows EAGAIN or a length to be returned. When looping over + multiple blocks to get sent, one block can get sent and the next might + not. And yet: when transport_send() has returned EAGAIN we must not call + it again with new data until it has returned OK on the existing data it + is still working on. This makes it a mess and we do get a much easier + job by simply returning the bytes or EAGAIN at once, as in the EAGAIN + case we can assume that we will be called with the same arguments again + and transport_send() will be happy. + + Unfortunately, I think we take a small performance hit by not being able + to do this. + +- ssh2_echo: new example + + This is a new example snippet. The code is largely based on ssh2_exec, + and is written by Tommy Lindgren. I edited it into C90 compliance and to + conform to libssh2 indent style and some more. + +- send_existing: return after send_existing + + When a piece of data is sent from the send_existing() function we must + make the parent function return afterwards. 
Otherwise we risk that the + parent function tries to send more data and ends up getting an EAGAIN for + that more data and since it can only return one return code it doesn't + return info for the successfully sent data. + + As this change is a regression I now added a larger comment explaining + why it has to work like this. + +- _libssh2_channel_write: count resent data as written + + In the logic that resends data that was kept for that purpose due to a + previous EAGAIN, the data was not counted as sent causing badness. + +Peter Stuge (13 Nov 2010) +- Use fprintf(stderr, ) instead of write(2, ) for debugging + +- session/transport: Correctly handle when _libssh2_send() returns -EAGAIN + +- src/agent.c: Simplify _libssh2_send() error checking ever so slightly + +Daniel Stenberg (12 Nov 2010) +- send/recv: use _libssh2_recv and _libssh2_send now + + Starting now, we unconditionally use the internal replacement functions + for send() and recv() - creatively named _libssh2_recv() and + _libssh2_send(). + + On errors, these functions return the negative 'errno' value instead of + the traditional -1. This design allows systems that have no "natural" + errno support to not have to invent it. It also means that no code + outside of these two transfer functions should use the errno variable. + +- channel_write: move some logic to _libssh2_channel_write + + Some checks are better done in _libssh2_channel_write just once per + write instead of in channel_write() since the looping will call the + latter function multiple times per _libssh2_channel_write() invoke. + +- sftp_write: handle "left over" acked data + + The SFTP handle struct now buffers number of acked bytes that haven't + yet been returned. The way this is used is as following: + + 1. sftp_write() gets called with a buffer of let say size 32000. We + split 32000 into 8 smaller packets and send them off one by one. One of + them gets acked before the function returns so 4000 is returned. + + 2. 
sftp_write() gets called again a short while after the previous one, + now with a much smaller size passed in to the function. Lets say 8000. + In the mean-time, all of the remaining packets from the previous call + have been acked (7*4000 = 28000). This function then returns 8000 as all + data passed in are already sent and it can't return any more than what + it got passed in. But we have 28000 bytes acked. We now store the + remaining 20000 in the handle->u.file.acked struct field to add up in + the next call. + + 3. sftp_write() gets called again, and now there's a backlogged 20000 + bytes to return as fine and that will get skipped from the beginning + of the buffer that is passed in. + +- sftp_write: polished and simplified + + Removed unnecessary struct fields and state changes within the function. + + Made the loop that checks for ACKs only check chunks that were fully + sent. + +- SCP: on failure, show the numerical error reason + + By calling libssh2_session_last_errno() + +- SFTP: provide the numerical error reason on failure + +- SCP: clean up failure treatment + + When SCP send or recv fails, it gets a special message from the server + with a warning or error message included. We have no current API to + expose that message but the foundation is there. Removed unnecessary use + of session struct fields. + +- sftp_write: enlarge buffer to perform better + +- packets: code cleanup + + I added size checks in several places. I fixed the code flow to be easier + to read in some places. + + I removed unnecessary zeroing of structs. I removed unused struct fields. 
+ +- LIBSSH2_CALLBACK_MACERROR: clarify return code use + +- _libssh2_userauth_publickey: avoid shadowing + +- packet: avoid shadowing global symbols + +- sftp_readdir: avoid shadowing + +- shadowing: don't shadow the global compress + +- _libssh2_packet_add: turn ifs into a single switch + +- _libssh2_packet_add: check SSH_MSG_GLOBAL_REQUEST packet + +- _libssh2_packet_add: SSH_MSG_DEBUG length checks + + Verify lengths before using them. Read always_display from the correct + index. Don't copy stuff around just to provide zero-termination of the + strings. + +- _libssh2_packet_add: SSH_MSG_IGNORE skip memmove + + There's no promise of a zero termination of the data in the callback so + no longer perform ugly operation in order to provide it. + +- _libssh2_packet_add: SSH_MSG_DISCONNECT length checks + + Verify lengths before trying to read data. + +- indent: break lines at 80 columns + +- SSH_MSG_CHANNEL_OPEN_FAILURE: used defined values + + We don't like magic numbers in the code. Now the acceptable failure + codes sent in the SSH_MSG_CHANNEL_OPEN_FAILURE message are added as + defined values in the private header file. + +- sftp_write: don't return EAGAIN if no EAGAIN was received + + This function now only returns EAGAIN if a lower layer actually returned + EAGAIN to it. If nothing was acked and no EAGAIN was received, it will + now instead return 0. + +- _libssh2_wait_socket: detect nothing-to-wait-for + + If _libssh2_wait_socket() gets called but there's no direction set to + wait for, this causes a "hang". This code now detects this situation, + set a 1 second timeout instead and outputs a debug output about it. + +- decomp: remove the free_dest argument + + Since the decompress function ALWAYS returns allocated memory we get a + lot simpler code by removing the ability to return data unallocated. 
+ +- decomp: cleaned off old compression stuff + + I cleared off legacy code from when the compression and decompression + functions were a single unified function. Makes the code easier to read + too. + +- [TJ Saunders brought this change] + + decomp: increase decompression buffer sizes + +- [TJ Saunders brought this change] + + zlib: Add debug tracing of zlib errors + +- sftp_packet_read: handle partial reads of the length field + + SFTP packets come as [32 bit length][payload] and the code didn't + previously handle that the initial 32 bit field was read only partially + when it was read. + +- [Jasmeet Bagga brought this change] + + kex_agree_hostkey: fix NULL pointer dereference + + While setting up the session, ssh tries to determine the type of + encryption method it can use for the session. This requires looking at + the keys offered by the remote host and comparing these with the methods + supported by libssh2 (rsa & dss). To do this there is an iteration over + the array containing the methods supported by libssh2. + + If there is no agreement on the type of encryption we come to the 3rd + entry of the hostkeyp array. Here hostkeyp is valid but *hostkeyp is + NULL. Thus when we dereference that in (*hostkeyp)->name there is a + crash + +- _libssh2_transport_send: remove dead assignment + + 'data' isn't accessed beyond this point so there's no need to assign it. + +- scp_recv: remove dead assignment + + Instead of assigning a variable we won't read, we now use the more + explicit (void) prefix. + +- sftp_write: removed superfluous assignment + +- bugfix: avoid use of uninitialized value + +- sftp_packet_require: propagate error codes better + + There were some chances that they would cause -1 to get returned by + public functions and as we're hunting down all such occurrences and since + the underlying functions do return valuable information the code now + passes back proper return codes better. 
+ +- [Alfred Gebert brought this change] + + fix memory leaks (two times cipher_data) for each sftp session + +- libssh2_userauth_authenticated: make it work as documented + + The man page clearly says it returns 1 for "already authenticated" but + the code said non-zero. I changed the code to use 1 now, as that is also + non-zero but it gets the benefit that it now matches the documentation. + + Using 1 instead of non-zero is better for two reasons: + + 1. We have the opportunity to introduce other return codes in the future for + things like error and what not. + 2. We don't expose the internal bitmask variable value. + +- userauth_keyboard_interactive: fix indent + +- [Alfred Gebert brought this change] + + fix memory leak in userauth_keyboard_interactive() + + First I wanted to free the memory in session_free() but then + I had still memory leaks because in my test case the function + userauth_keyboard_interactive() is called twice. It is called + twice perhaps because the server has this authentication + methods available: publickey,gssapi-with-mic,keyboard-interactive + The keyboard-interactive method is successful. + +- dist: include sftp.h in dist archives + +Simon Josefsson (27 Oct 2010) +- Update header to match new function prototype, see c48840ba88. + +Daniel Stenberg (26 Oct 2010) +- bugfixes: the transport rearrange left some subtle flaws now gone + +- libssh2_userauth_publickey_fromfile_ex.3: cleaned up looks + +- libssh2_userauth_publickey: add man page + + I found an undocumented public function and we can't have it like + that. The description here is incomplete, but should serve as a template + to allow filling in... 
+ +- libssh2_sftp_write.3: added blurb about the "write ahead" + + Documented the new SFTP write concept + +- sftp_close_handle: free any trailing write chunks + +- _libssh2_channel_write: fix warnings + +- SFTP: bugfix, move more sftp stuff to sftp.h + + The sftp_write function shouldn't assume that the buffer pointer will be + the same in subsequent calls, even if it assumes that the data already + passed in before haven't changed. + + The sftp structs are now moved to sftp.h (which I forgot to add before) + +- SFTP: use multiple outgoing packets when writing + + sftp_write was rewritten to split up outgoing data into multiple packets + and deal with the acks in a more asynchronous manner. This is meant to + help overcome latency and round-trip problems with the SFTP protocol. + +- TODO: implemented a lot of the ideas now + +- _libssh2_channel_write: removed 32500 size limit + + Neither _libssh2_channel_write nor sftp_write now have the 32500 size + limit anymore and instead the channel writing function now has its own + logic to send data in multiple calls until everything is sent. + +- send_existing: don't tell parent to return when drained + + That will just cause unnecessary code execution. + +- _libssh2_channel_write: general code cleanup + + simplified the function and removed some unused struct fields + +- _libssh2_transport_send: replaces _libssh2_transport_write + + The new function takes two data areas, combines them and sends them as a + single SSH packet. This allows several functions to allocate and copy + less data. + + I also found and fixed a mixed up use of the compression function + arguments that I introduced in my rewrite in a recent commit. + +- scp_write_nonblock: use select() instead of busyloop + + Make this example nicer by not busylooping. 
+ +- send_existing: clear olen when the data is sent off + +- _libssh2_transport_write: allow 256 extra bytes around the packet + +- _libssh2_transport_write: remade to send without malloc + +- compress: compression disabled by default + + We now allow libssh2_session_flag() to enable compression with a new + flag and I added documentation for the previous LIBSSH2_FLAG_SIGPIPE + flag which I wasn't really aware of! + +- comp: split the compress function + + It is now made into two separate compress and decompress functions. In + preparation for upcoming further modifications. + +Dan Fandrich (20 Oct 2010) +- Added header file to allow compiling in older environments + +Daniel Stenberg (20 Oct 2010) +- TODO: add a possible new API for SFTP transfers + +- TODO: "New Transport API" added + +- TODO: add buffering plans + +Simon Josefsson (13 Oct 2010) +- Mention libssh2_channel_get_exit_signal and give kudos. + +- [Tommy Lindgren brought this change] + + Add libssh2_channel_get_exit_signal man page. + + Signed-off-by: Simon Josefsson + +- [Tommy Lindgren brought this change] + + Add libssh2_channel_get_exit_signal. + + Signed-off-by: Simon Josefsson + +- Add libssh2_free man page and fix typo. + +- Add libssh2_free. + +Daniel Stenberg (11 Oct 2010) +- scp_recv: improved treatment of channel_read() returning zero + + As a zero return code from channel_read() is not an error we must make + sure that the SCP functions deal with that properly. channel_read() + always returns 0 if the channel is EOFed already so we check for EOF + after 0-reads to be able to return error properly. + +- libssh2_session_methods.3: detail what can be asked for + +- compression: send zlib before none + + As the list of algorithms in a preferred order we should send zlib + before none to increase the chances that the server will let us do + compression. + +- compress: faster check, better return codes + + In the transport functions we avoid a strcmp() now and just check a + boolean instead. 
+ + The compress/decompress function's return code is now acknowledged and + used as actual return code in case of failures. + +- libssh2_session_handshake: replaces libssh2_session_startup() + + The function libssh2_session_startup() is now considered deprecated due + to the portability issue with the socket argument. + libssh2_session_handshake() is the name of the replacement. + +- libssh2_socket_t: now externally visible + + In preparation for upcoming changes, the libssh2_socket_t type is now + typedef'ed in the public header. + +- _libssh2_transport_drain: removed + + This function proved not to be used nor useful. + +- _libssh2_channel_write: don't iterate over transport writes + + When a call to _libssh2_transport_write() succeeds, we must return from + _libssh2_channel_write() to allow the caller to provide the next chunk + of data. + + We cannot move on to send the next piece of data that may already have + been provided in this same function call, as we risk getting EAGAIN for + that and we can't return information both about sent data as well as + EAGAIN. So, by returning short now, the caller will call this function + again with new data to send. + +- _libssh2_transport_write: updated documentation blurb + +- _libssh2_transport_write: remove fprintf remainder + + Mistake from previous debugging + +- session: improved errors + + Replaced -1/SOCKET_NONE errors with appropriate error defines instead. + + Made the verbose trace output during banner receiving less annoying for + non-blocking sessions. + +- crypt_init: use correct error define + +- _libssh2_error: hide EAGAIN for non-blocking sessions + + In an attempt to make the trace output less cluttered for non-blocking + sessions the error function now avoids calling the debug function if the + error is the EAGAIN and the session is non-blocking. 
+ +- agent: use better error defines + +- comp_method_zlib_init: use correct error defines + +- transport: better error codes + + LIBSSH2_SOCKET_NONE (-1) should no longer be used as error code as it is + (too) generic and we should instead use specific and dedicated error + codes to better describe the error. + +- channel: return code and _libssh2_error cleanup + + Made sure that all transport_write() call failures get _libssh2_error + called. + +- _libssh2_channel_write: limit to 32700 bytes + + The well known and used ssh server Dropbear has a maximum SSH packet + length at 32768 by default. Since the libssh2 design currently has a + fixed one-to-one mapping from channel_write() to the packet size created + by transport_write() the previous limit of 32768 in the channel layer + caused the transport layer to create larger packets than 32768 at times + which Dropbear rejected forcibly (by closing the connection). + + The long term fix is of course to remove the hard relation between the + outgoing SSH packet size and what the input length argument is in the + transport_write() function call. + +- libssh.h: add more dedicated error codes + +- SCP: allow file names with bytes > 126 + + When parsing the SCP protocol and verifying that the data looks like a + valid file name, byte values over 126 must not be considered illegal since + UTF-8 file names will use such codes. + + Reported by: Uli Zappe + Bug: http://www.libssh2.org/mail/libssh2-devel-archive-2010-08/0112.shtml + +Dan Fandrich (25 Aug 2010) +- Document the three sftp stat constants + +Guenter Knauf (18 Aug 2010) +- Fixed Win32 makefile which was now broken at resource build. + +- It is sufficient to pipe stderr to NUL to get rid of the nasty messages. + +- [Author: Guenter Knauf brought this change] + + Removed Win32 ifdef completely for sys/uio.h. 
+ + No idea why we had this ifdef at all but MSVC, MingW32, Watcom + and Borland all have no sys/uio.h header; so if there's another + Win32 compiler which needs it then it should be added explicitly + instead of this negative list. + +- New files should also be added to Makefile.am. + + Otherwise they will never be included with release and snapshot tarballs ... + +Daniel Stenberg (18 Aug 2010) +- version: bump to 1.2.8_DEV + +Version 1.2.7 (17 Aug 2010) + +Daniel Stenberg (17 Aug 2010) +- release: updated to hold 1.2.7 info + +Guenter Knauf (17 Aug 2010) +- Use the new libssh2.rc file. + +- Added resource file for libssh2.dll (shamelessly stolen from libcurl). + +- Updated Win32 MSVC dependencies versions. + +- Added include for sys/select.h to get fd_set on some platforms. + +- Added Watcom makefile borrowed from libcurl. + + This makefile compiles already all files fine for static lib, but needs + final touch when I have OpenSSL fully working with shared libs and Watcom. + +- Added copyright define to libssh2.h and use it for binary builds. + +- Moved version defines up in order to include from .rc file. + + Blocked rest of header with ifndef so it's possible to let + the rc compiler only use the version defines. + +- Some minor makefile tweaks. + +Daniel Stenberg (2 Aug 2010) +- example: treat the libssh2_channel_read() return code properly + + A short read is not an error. Only negative values are errors! + +- libssh2_wait_socket: reset error code to "leak" EAGAIN less + + Since libssh2 often sets LIBSSH2_ERROR_EAGAIN internally before + _libssh2_wait_socket is called, we can decrease some amount of + confusion in user programs by resetting the error code in this function + to reduce the risk of EAGAIN being stored as error when a blocking + function returns. + +- _libssh2_wait_socket: poll needs milliseconds + + As reported on the mailing list, the code path using poll() should + multiply seconds by 1000 to get milliseconds, not divide! 
+ + Reported by: Jan Van Boghout + +- typedef: make ssize_t get typedef without LIBSSH2_WIN32 + + The condition around the ssize_t typedef depended on both LIBSSH2_WIN32 + *and* _MSC_VER being defined when it should be enough to depend on + _MSC_VER only. It also makes it nicer so libssh2-using code builds fine + without having custom defines. + +- [John Little brought this change] + + session_free: free more data to avoid memory leaks + +- channel_free: ignore problems with channel_close() + + As was pointed out in bug #182, we must not return failure from + _libssh2_channel_free() when _libssh2_channel_close() returns an error + that isn't EAGAIN. It can effectively cause the function to never go + through, like it did now in the case where the socket was actually + closed but socket_state still said LIBSSH2_SOCKET_CONNECTED. + + I consider this fix the right thing as it now also survives other + errors, even if making sure socket_state isn't lying is also a good + idea. + +- publickey_list_free: no return value from a void function + + Fixed a compiler warning I introduced previously when checking input + arguments more. I also added a check for the other pointer to avoid NULL + pointer dereferences. + +- [Lars Nordin brought this change] + + openssl: make use of the EVP interface + + Make use of the EVP interface for the AES-funktion. Using this method + supports the use of different ENGINES in OpenSSL for the AES function + (and the direct call to the AES_encrypt should not be used according to + openssl.org) + +Peter Stuge (23 Jun 2010) +- [Tor Arntsen brought this change] + + Don't overflow MD5 server hostkey + + Use SHA_DIGEST_LENGTH and MD5_DIGEST_LENGTH in memcpy instead of hardcoded + values. An incorrect value was used for MD5. + +- Fix message length bugs in libssh2_debug() + + There was a buffer overflow waiting to happen when a debug message was + longer than 1536 bytes. 
+ + Thanks to Daniel who spotted that there was a problem with the message + length passed to a trace handler also after commit + 0f0652a3093111fc7dac0205fdcf8d02bf16e89f. + +- Make libssh2_debug() create a correctly terminated string + + Also use FILE *stderr rather than fd 2, which can very well be something + completely different. + +Daniel Stenberg (23 Jun 2010) +- [TJ Saunders brought this change] + + handshake: Compression enabled at the wrong time + + In KEXINIT messages, the client and server agree on, among other + things, whether to use compression. This method agreement occurs + in src/kex.c's kex_agree_methods() function. However, if + compression is enabled (either client->server, server->client, or + both), then the compression layer is initialized in + kex_agree_methods() -- before NEWKEYS has been received. + + Instead, the initialization of the compression layer should + happen after NEWKEYS has been received. This looks to occur + in src/kex.c's diffie_hellman_sha1(), which even has the comment: + + /* The first key exchange has been performed, + + switch to active crypt/comp/mac mode */ + + There, after NEWKEYS is received, the cipher and mac algorithms + are initialized, and that is where the compression should be + initialized as well. + + The current implementation fails if server->client compression is + enabled because most server implementations follow OpenSSH's + lead, where compression is initialized after NEWKEYS. Since the + server initializes compression after NEWKEYS, but libssh2 + initializes compression after KEXINIT (i.e. before NEWKEYS), they + are out of sync. + + Reported in bug report #180 + +- [TJ Saunders brought this change] + + userauth_hostbased_fromfile: packet length too short + + The packet length calculated in src/userauth.c's + userauth_hostbased_fromfile() function is too short by 4 bytes; + it forgets to add four bytes for the length of the hostname. 
+ This causes hostbased authentication to fail, since the server + will read junk data. + + verified against proftpd's mod_sftp module + +- _libssh2_userauth_publickey: reject method names longer than the data + + This function gets the method length by looking at the first 32 + bits of data, and I now made it not accept method lengths that are + longer than the whole data set is, as given in the dedicated + function argument. + + This was detected when the function was given bogus public key + data as an ascii string, which caused the first 32 bits to create + a HUGE number. + +- NULL resistance: make more public functions survive NULL pointer input + + Sending in NULL as the primary pointer is now dealt with by more + public functions. I also narrowed the userauth.c code somewhat to + stay within 80 columns better. + +- agent: make libssh2_agent_userauth() work blocking properly + + previously it would always work in a non-blocking manner + +Peter Stuge (17 Jun 2010) +- Fix underscore typo for 64-bit printf format specifiers on Windows + + Commit 49ddf447ff4bd80285f926eac0115f4e595f9425 was missing underscores. + +Daniel Stenberg (16 Jun 2010) +- libssh2_session_callback_set: extended the man page + +- [John brought this change] + + LIBSSH2_DEBUG: macro uses incorrect function variable + + The LIBSSH2_DEBUG macro, defined in libssh2_priv.h, incorrectly uses the + function variable ssh_msg_disconnect when it should use ssh_msg_debug. + + This shows that the LIBSSH2_CALLBACK_DEBUG callback never has worked... + +- warning: fix a compiler warning 'pointer differs in signedness' + + As reported in bug #177 + +- portability: introduce LIBSSH2_INT64_T_FORMAT for 64bit printf()s + + As pointed out in bug #177, some of the Windows compilers use + %I64 to output 64 bit variables with the printf family. + +- debug: avoid sending NULL to sprintf %s + + Via the _libssh2_debug() macro/function. 
Pointed out by john in bug report + +- sftp docs: show macro on macro page, only function on function page + + The individual man pages for macros now show the full convenience + macro as defined, and then the man page for the actual function + only shows the function. + +- code police: make the code use less than 80 columns + +- libssh2_channel_write_ex: remove macros, added wording on buffer size + +- libssh2_sftp_write: document buffer size and changed some ordering + +- libssh2_channel_write_stderr: show how the macro is defined + +- libssh2_channel_write: show how the macro is defined + +- SFTP: limit write() to not produce overly large packets + + sftp_write() now limits how much data it gets at a time even more + than before. Since this function creates a complete outgoing + packet based on what gets passed to it, it is crucial that it + doesn't create too large packets. + + With this method, there's also no longer any problem to use very + large buffers in your application and feed that to libssh2. I've + done numerous tests now with uploading data over SFTP using 100K + buffers and I've had no problems with that. + +- scp_write_nonblock: add transfer time info + + Using the same timing logic and output format as + sftp_write_nonblock allows us to very easily run benchmarks on + SCP vs SFTP uploads using libssh2. + +- sftp_write_nonblock: select() on socket, use *BIG* buffer, time transfer + + The select() is just to make it nicer so that it doesn't + crazy-loop on EAGAIN. The buffer size thing is mostly to verify + that this really works as supposed. + + Transfer timing is just a minor thing, but it can just as well be + there and help us time and work on performance easier using out + of the box examples. + +- agent: use _libssh2_error() when returning errors + + As pointed out in bug report #173, this module basically never + used _libssh2_error() which made it work inconsistently with other + parts of the libssh2 code base. 
This is my first take at making + this code more in line with the rest. + +- inputchecks: make lots of API functions check for NULL pointers + + If an application accidentally provides a NULL handle pointer to + the channel or sftp public functions, they now return an error + instead of segfaulting. + +- libssh2_channel_eof: clarify that it returns negative on errors + +- SFTP: keep the sftp error code as 32 bit + + 'last_errno' holds to the error code from the SFTP protocol and + since that is 32 bits on the wire there's no point in using a + long for this internally which is larger on some platforms. + +- agent: make the code better deal with unexpected code flows + + agent->ops gets initialized by the libssh2_agent_connect() call + but we need to make sure that we don't segfault even if a bad + sequence of function calls is used. + +Alexander Lamaison (10 Jun 2010) +- Better handling of invalid key files. + + Passing an invalid public key to libssh2_userauth_publickey_fromfile_ex + triggered an assertion. Replaced this with a runtime check that rejects + obviously invalid key data. + +Daniel Stenberg (10 Jun 2010) +- version: we start working on 1.2.7 now + +Version 1.2.6 (10 Jun 2010) + +Daniel Stenberg (10 Jun 2010) +- NEWS: add the 1.2.6 release details + +- RELEASE-NOTES: 1.2.6 details added + +Guenter Knauf (10 Jun 2010) +- fixed libssh2.dsw to use the generated libssh2.dsp; removed old *.dsp files. + +- moved MSVC strdup define to libssh2_config.h which we include already. + +- added missing source files to src/NMakefile. + +Daniel Stenberg (8 Jun 2010) +- libssh2_poll: refer to poll(3) and select(3) instead + +- example: fix strdup() for MSVC compiles + + MSVC has a _strdup() that we better use. 
This was reported in bug + +- SFTP: fail init SFTP if session isn't authenticated + + Alexander Lamaison filed bug #172 + (http://trac.libssh2.org/ticket/172), and pointed out that SFTP + init would do bad if the session isn't yet authenticated at the + time of the call, so we now check for this situation and return + an error if detected. Calling sftp_init() at this point is bad + usage to start with. + +- direct_tcpip: bring back inclusion of libssh2_config.h + + In order to increase portability of this example, I'm bringing + the inclusion of libssh2_config.h back, and I also now + require that header for this example to compile. + + I also made all code lines fit within 80 columns. + +Guenter Knauf (3 Jun 2010) +- cast away a warning. + +- moved CRT_SECURE_NO_DEPRECATE define up so it's defined before the winsock headers are included. + +- fixed platform detection for MingW32 test makefile. + +- MingW32 has gettimeofday() implemented, so proper ifdef this function here. + +- removed MSVC ifdef since seems we can use __int64 still with latest headers. + +- changed copyright notice for MingW32 and NetWare binaries. + +- cleaned up MSVC ifdefs which were spread over 3 places. + +- added uint8_t typedef for NetWare CLIB platform. + +- if the function declaration gets changed the header should be changed too. + +- this is MSVC specific and doesn't apply for all Win32 compilers; + the uint8_t typedef clashes with MingW32 headers. + +- updated MingW32 makefiles for latest dependency lib versions. + +- updated NetWare makefiles for latest dependency lib versions. 
+ +Dan Fandrich (30 May 2010) +- Fixed compiling with libgcrypt + + A change of parameter types from unsigned long to size_t was + missed in the prototype in libgcrypt.h + +Daniel Stenberg (28 May 2010) +- statvfs: use libssh2_sftp_statvfs only, no "_ex" + + As the long-term goal is to get rid of the extensive set of + macros from the API we can just as well start small by not adding + new macros when we add new functions. Therefore we let the + function be libssh2_sftp_statvfs() plainly without using an _ex + suffix. + + I also made it use size_t instead of unsigned int for the string + length as that too is a long-term goal for the API. + +- [Grubsky Grigory brought this change] + + DSP: output lib name typo + +- [Grubsky Grigory brought this change] + + win32: provide a uint8_t typedef for better building on windows + +- agent: win32: fix bad _libssh2_store_str call + + As pointed out by Grubsky Grigory , I + made a mistake when I added the _libssh2_store_str() call before + and I made a slightly different patch than what he suggested. + Based purely on taste. + +Peter Stuge (24 May 2010) +- [Joey Degges brought this change] + + Add libssh2_sftp_statvfs() and libssh2_sftp_fstatvfs() + + These can be used to get file system statistics from servers that + support the statvfs@openssh.com and fstatvfs@openssh.com extensions. + +Alexander Lamaison (22 May 2010) +- [Jose Baars brought this change] + + VMS specific: make sure final release can be installed over daily build + +- [Jose Baars brought this change] + + VMS: small improvement to the man2help utilities + +Peter Stuge (22 May 2010) +- [Joey Degges brought this change] + + libssh2_exit and libssh2_sftp_readdir man page fixes + +Daniel Stenberg (21 May 2010) +- spelling: s/sue/use + +Alexander Lamaison (21 May 2010) +- Change magic port number for generic knownhost check. + + libssh2_knownhost_checkp took 0 as a magic port number that indicated + a 'generic' check should be performed. 
However, 0 is a valid port + number in its own right so this commit changes the magic value to any + negative int. + +Mikhail Gusarov (5 May 2010) +- Add re-discovered copyright holders to COPYING + +- Restoring copyright statements from pre-git era + + Eli Fant has contributed fragmenting SFTP requests + +- Restoring my copyright statements from pre-git era + + keyboard_interactive, 'exit-status' information packet, non-atomic read/write + under FreeBSD, multi-channel operation bugfixes. + +Daniel Stenberg (3 May 2010) +- pedantic: make the code C90 clean + +Peter Stuge (3 May 2010) +- Do proper keyboard-interactive user dialog in the sftp.c example + +Daniel Stenberg (3 May 2010) +- added to tarball: libssh2_knownhost_checkp.3 + +- knownhost: support [host]:port in knownhost file + + OpenSSH has ways to add hosts to the knownhosts file that include + a specific port number which makes the key associated with only + that specific host+port pair. libssh2 previously did not support + this, and I was forced to add a new function to the API to + properly expose this ability to applications: + libssh2_knownhost_checkp() + + To *add* such hosts to the knownhosts file, you make sure to pass + on the host name in that manner to the libssh2_knownhost_addc() + function. + +- init/exit: mention these were added in 1.2.5 + +- libssh2_knownhost_check docs: correct the prototype + +- examples: avoid use of uninitialized variable 'sock' + +- KEX: stop pretending we negotiate language + + There was some stub-like parts of an implementation for + implementing kex language negotiation that caused clang-analyzer + to warn and as it did nothing I've now removed the dead code. 
+ +- Uninitialized argument + +- sftpdir: removed dead assignment + +- Makefile.am: include the VMS-specific config header as well + +- [Jose Baars brought this change] + + Add VMS specific libssh2_config.h + +- fix Value stored to 's' is never read warning + + and moved variable declaration of s to be more local + +- kexinit: simplify the code and avoid scan-build warning + + Previously it would say "Value stored to 's' is never read" due to the + fourth increment of 's'. + +Alexander Lamaison (28 Apr 2010) +- Removed unnecessary brackets. + +- Changed sftp_attrsize macro to a static function. + +Daniel Stenberg (28 Apr 2010) +- release: include the VMS-specific files + +- sftp_attrsize: protect the macro argument with proper parentheses + +- ssh2_agent: avoid using 'session' uninitialized on failures + +- examples: remove assignments of variable rc that's never used + +- publickey_init: remove useless variable increment + +- hostkey_method_ssh_rsa_init: remove useless variable increment + +- packet_x11_open: removed useless variable increment + + and made the declaration of a variable more local + +- packet_queue_listener: removed useless variable increment + + and made the declaration of a variable more local + +- sftp_read: move a read_responses array to where it's used + + I find that this increases readability since the array is used + only in the function call just immediately below and nowhere + else. + +- sftp_readdir: turn a small array static const and move it + +- sftp_attrsize: converted function to a macro + + This way, the macro can evaluate a static number at compile time + for two out of four uses, and it probably runs faster for the + other two cases too. + +- sftp_open: deal with short channel_write calls + + This was an old TODO that just wasn't done before. If + channel_write returns short, that is not an error. 
+ +- sftp_open: clean up, better check of input data + + The clang-analyzer report made it look into this function and + I've went through it to remove a potential use of an + uninitialized variable and I also added some validation of input + data received from the server. + + In general, lots of more code in this file need to validate the + input before assuming it is correct: there are servers out there + that have bugs or just have another idea of how to do the SFTP + protocol. + +- bugfix: avoid using the socket if it failed to create one + +- bugfix: potential use of NULL pointer + +- libssh2_userauth_password_ex: clarify errors somewhat + + The errors mentioned in this man page are possible return codes + but not necessarily the only return codes that this can return. + + Also reformatted the typ prototypes somewhat. + +- examples: fixed and made them more similar + + The channel read/write functions can return 0 in legitimate cases + without it being an error, and we need to loop properly if they + return short. + +- [Jose Baars brought this change] + + VMS port of libssh2; changes in the libssh2 common code + +- Makefile: added the two news headers userauth.h and session.h + +- cleanup: prefer the internal functions + + To get the blocking vs non-blocking to work as smooth as possible + and behave better internally, we avoid using the external + interfaces when calling functions internally. + + Renamed a few internal functions to use _libssh2 prefix when not + being private within a file, and removed the libssh2_ for one + that was private within the file. + +- session_free: remove dead code + +- libssh2_publickey_init: fixed to work better non-blocking + + This was triggered by a clang-analyzer complaint that turned out + to be valid, and it made me dig deeper and fix some generic non- + blocking problems I disovered in the code. + + While cleaning this up, I moved session-specific stuff over to a + new session.h header from the libssh2_priv.h header. 
+ +- channel: reduce duplicated free and returns + + Simplified the code by trying to free data and return on a single + spot. + +- channel: make variables more local + + By making 'data' and 'data_len' more local in several places in + this file it will be easier to spot how they are used and we'll + run fewer risks of accidentally doing bad things with them. + +Mikhail Gusarov (24 Apr 2010) +- Fix typos in manpages, caught by Lintian + +Daniel Stenberg (24 Apr 2010) +- channel_request_pty: simplify the code + + clang-analyzer pointed out how 'data' could be accessed as a NULL + pointer if the wrong state was set, and while I don't see that + happen in real-life the code flow is easier to read and follow by + moving the LIBSSH2_FREE() call into the block that is supposed to + deal with the data pointer anyway. + +- libssh2_channel_process_startup: simplify the code + + clang-analyzer pointed out how 'data' could be accessed as a NULL + pointer if the wrong state was set, and while I don't see that + happen in real-life the code flow is easier to read and follow by + moving the LIBSSH2_FREE() call into the block that is supposed to + deal with the data pointer anyway. + +- sftp_close_handle: add precaution to not access NULL pointer + + clang-analyzer pointed this out as a "Pass-by-value argument in + function call is undefined" but while I can't see exactly how + this can ever happen in reality I think a little check for safety + isn't such a bad thing here. + +- scp_write_nonblock: Value stored to 'nread' is never read + +- scp_write: Value stored to 'ptr' is never read + +- scp_write_nonblock: Value stored to 'ptr' is never read + +- sftp_mkdir: less silly output but show failures + +- [Jose Baars brought this change] + + VMS port of libssh2 including VMS specific build procedures + +- two variable type changes, made lines less than 80 columns + + The two variable type changes are only to match the types of the variable + fields actually read from the binary protocol. 
+ +- remove check for negative padding_length + + It was silly, since it is read as an unsigned char... + +- hostkey_method_ssh_dss_init: Value stored to 's' is never read + +- libssh2_banner_set: avoid unnecessary increment and explain code + +- agent_transact_unix: remove unused variable + +- remove two unnecessary increments + +- more code converted to use _libssh2_store_*() + +- libssh2_publickey_list_fetch: removed unused variables + +- libssh2_publickey_init: remove unused variables + +- libssh2_scp_send64: added to API to provide large file transfers + + The previously existing libssh2_scp_send_ex() function has no way + to send files that are larger than 'size_t' which on 32bit + systems mean 4GB. This new API uses a libssh2_int64_t type and + should thus on most modern systems be able to send enormous + files. + +- sftp_init: remove unused variables and assignments + +- libssh2_knownhost_check: Value stored to 'keylen' is never read + +- hostkey: fix compiler warning + +- remove unused variable + +- data types: convert more to use size_t and uint32_t + +- channel: variable type cleanups + +- cleanups: better binary packet gen, size_t fixes and PACKET_* removal + + I'll introduce a new internal function set named + + _libssh2_store_u32 + _libssh2_store_u64 + _libssh2_store_str + + That can be used all through the library to build binary outgoing + packets. Using these instead of the current approach removes + hundreds of lines from the library while at the same time greatly + enhances readability. I've not yet fully converted everything to + use these functions. + + I've converted LOTS of 'unsigned long' to 'size_t' where + data/string lengths are dealt with internally. This is The Right + Thing and it will help us make the transition to our + size_t-polished API later on as well. + + I'm removing the PACKET_* error codes. 
They were originally + introduced as a set of separate error codes from the transport + layer, but having its own set of errors turned out to be very + awkward and they were then converted into a set of #defines that + simply map them to the global libssh2 error codes instead. Now, + I'll take the next logical step and simply replace the PACKET_* + defines with the actual LIBSSH2_ERROR_* defines. It will increase + readability and decrease confusion. + + I also separated packet stuff into its own packet.h header file. + +- clarified the return code + +- rename libssh2_error to the correct _libssh2_error + + We reserve ^libssh2_ for public symbols and we use _libssh2 as + prefix for internal ones. I fixed the indentation of all these + edits with emacs afterwards, which then changed it slightly more + than just _libssh2_error() expressions but I didn't see any + obvious problems. + +- data type cleanup: made lots of code use size_t etc + + A lot of code used 'unsigned long' and the likes when it should + rather just use plain 'int' or use size_t for data lengths. + +- wait_socket: make c89 compliant and use two fd_sets for select() + +- sftp_readdir: always zero terminate, detail the return code + + I also added a description for the 'longentry' field which was + previously undocumented! + +- sftp_readdir: simplified and bugfixed + + This function no longer has any special purpose code for the + single entry case, as it was pointless. + + The previous code would overflow the buffers with an off-by-one + in case the file name or longentry data fields received from the + server were exactly as long as the buffer provided to + libssh2_sftp_readdir_ex. + + We now make sure that libssh2_sftp_readdir_ex() ALWAYS zero + terminates the buffers it fills in. + + The function no longer calls the libssh2_* function again, but + properly uses the internal sftp_* instead. 
+ +- channel/transport: we now drain the outgoing send buffer when we ignore EAGAIN + + When we ignore the EAGAIN from the transport layer within channel_write, we + now drain the outgoing transport layer buffer so that remainders in that + won't cause any problems in the next invoke of _libssh2_transport_write() + +- channel_write: if data has been sent, don't return EAGAIN + + When sending data in a loop, we must not return EAGAIN if we + managed to send data in an earlier round. This was reported in + bug #126 => http://libssh2.stuge.se/ticket/126 + +Simon Josefsson (14 Apr 2010) +- Fix OpenSSL AES-128-CTR detection. + + Patch from Paul Howarth . + +Daniel Stenberg (13 Apr 2010) +- version in header file now says 1.2.6-DEV + +- 1.2.6: clean the RELEASE-NOTES for next release round + +- NEWS: add the stuff from the version 1.2.5 RELEASE-NOTES + +Version 1.2.5 (13 Apr 2010) + +Daniel Stenberg (13 Apr 2010) +- channel_close: no longer wait for the SSH_MSG_CHANNEL_CLOSE message + + As the packet may simply not arrive we cannot have the close + function wait for it unconditionally. + +- less code duplication in the poll vs select code flows + + libssh2_keepalive_send and libssh2_session_block_directions are + now used outside of the #ifdef blocks. + +- make it C90 compliant + +- updated with all changes and bugs since 1.2.4 + +- Added LIBSSH2_SFTP_S_IS***() macros and updated docs + + libssh2_sftp_fstat_ex.3 is now extended quite a lot to describe a + lot of the struct and the bits it uses and how to test for them. + +- sftp_init() deal with _libssh2_channel_write() returns short + + When _libssh2_channel_write() is asked to send off 9 bytes, the + code needs to deal with the situation where less than 9 bytes + were sent off and prepare to send the remaining piece at a later + time. + +- handle a NULL password as if it was "" + + libssh2_userauth_publickey_fromfile_ex() takes a "passphrase" + but didn't deal with it being set to NULL. 
+ +- Reduce used window sizes by factor 10 + + As reported in bug report #166 http://libssh2.stuge.se/ticket/166 + by 'ptjm', the maximum window size must be less crazy for libssh2 + to do better with more server implementations. I did not do any + testing to see how this changes raw SCP performance, but the + maximum window size is still almost 4MB. This also has the upside + that libssh2 will use less memory. + +Peter Stuge (28 Mar 2010) +- Correctly clear blocking flag after sending multipart packet + + commit 7317edab61d2179febc38a2c2c4da0b951d74cbc cleared the outbound + blocking bit when send_existing() returned PACKET_NONE and *ret=0, as + opposed to before even calling send_existing(), but because *ret=1 when + sending parts 2..n of an existing packet, the bit would only be cleared + when calling libssh2_transport_write() for a new packet. + + Clear the direction flag after the final part of a packet has been sent. + +Daniel Stenberg (24 Mar 2010) +- Added man page for libssh2_knownhost_addc() + + Added mention in libssh2_knownhost_add() docs that + libssh2_knownhost_addc() is the preferred function now. + +- at next soname bump remove libssh2_knownhost_add() + +- ignore TAGS ("make tags" makes them) + +- fix memory leak + + we must not assign the pointer a NULL since it keeps allocated + data and at least parts of an error string + +- fixed the pattern for avoiding the poll check + + added some comments about known problems with poll on darwin + +- avoid checking for poll on some systems + + darwin and interix are known to have broken poll implementations + so we skip the check on those and thus have them use select + unconditionally + +- ignore libssh2.dsp + +Simon Josefsson (23 Mar 2010) +- Fix logic in "on-the-fly" crypto init. + +- Make sure keepalive is working even when poll is used. + +- [Paul Querna brought this change] + + Use poll when available on blocking API. 
+ + Signed-off-by: Simon Josefsson + +Peter Stuge (20 Mar 2010) +- Fix speling + +Daniel Stenberg (19 Mar 2010) +- fix NULL dereference when window adjusting a non-existing channel + + Suyog Jadhav pointed out that when receiving a window adjust to + a channel not found, the code would reference a NULL pointer. + Now it will instead output a message about that fact. + +Simon Josefsson (19 Mar 2010) +- Fix build problem. + +- Eat our own dog food, call libssh2_init and libssh2_exit in the examples. + +- Fix init/exit logic. Add self-test of it. + +Daniel Stenberg (19 Mar 2010) +- fix typo + +Simon Josefsson (19 Mar 2010) +- Add man page for libssh2_init and libssh2_exit. Fix libssh2_exit prototype. + +- Shorten constant a bit. More documentation. + +- Fix namespace pollution. + +- Add global init/exit points, to do crypto initialization in one place. + + By Lars Nordin. + +Daniel Stenberg (14 Mar 2010) +- libssh2 is released under the Modified BSD license, not GPL + +Alexander Lamaison (14 Mar 2010) +- Add libssh2_knownhost_addc to handle comments. + + Comments in known_hosts file were not handled properly. They were parsed as + part of the key causing key matching to return a mismatch if the entry had a + comment. This adds a new API function that takes an optional comment and + changes libssh2_knownhost_readline to parse the comment and pass it to the + new function. + + Fixes #164. + +- Fix gettimeofday to compile with Visual C++ 6. + + Reported by Steven Van Ingelgem. + +Simon Josefsson (10 Mar 2010) +- Add. + +- keepalive.c: Fix libssh2_error usage. + +- Fix typo in last commit. + +- Tidy up build option notice. + +- Add entry about keep alive stuff. + +- Add keep-alive support. + +Alexander Lamaison (7 Mar 2010) +- Untabify. + +- Fix memory leak in libssh2_knownhost_add. 
+ +Daniel Stenberg (6 Mar 2010) +- change 'int' to 'libssh2_socket_t' in the public API for sockets + +- reduce code duplication and return underlying error better + +- acknowledge when _libssh2_packet_requirev() returns an error + + when _libssh2_packet_requirev() returns an error when waiting for + SSH_MSG_USERAUTH_SUCCESS or SSH_MSG_USERAUTH_FAILURE, it is an + error and it should be treated as such + +- wrap long lines + +- polished the phrasing in two error strings + +- silence picky compiler warnings + +- silence picky compiler warnings + +- removed libssh2_error()'s fourth argument + + libssh2_error() no longer allocates a string and only accepts a const + error string. I also made a lot of functions use the construct of + return libssh2_error(...) instead of having one call to + libssh2_error() and then a separate return call. In several of those + cases I then also changed the former -1 return code to a more + detailed one - something that I think will not change behaviors + anywhere but it's worth keeping an eye open for any such. + +- repaired --enable-debug + +Simon Josefsson (1 Mar 2010) +- Make ./configure output a summary of build options. + +Daniel Stenberg (1 Mar 2010) +- let the err_msg in the session struct be const too + +Simon Josefsson (1 Mar 2010) +- Revert #ifdef change that pulled in AES-CTR code when explicitly disabled. + +Daniel Stenberg (1 Mar 2010) +- fix #ifdefs + +- make function match the new proto + +Simon Josefsson (1 Mar 2010) +- Improve AES-CTR check. + +Daniel Stenberg (1 Mar 2010) +- use const to silence a bazillion warnings + +Simon Josefsson (1 Mar 2010) +- Use AES-CTR from OpenSSL when available. + + Reported by Lars Nordin . + +- Make it possible to disable DSA. + + Patch from Lars Nordin . 
+ +Peter Stuge (1 Mar 2010) +- Send and receive channel EOF before sending SSH_MSG_CHANNEL_CLOSE + + Sending SSH_MSG_CHANNEL_CLOSE without channel EOF is explicitly allowed + in RFC 4254, but some non-conforming servers will hang or time out when + the channel is closed before EOF. + + Other common clients send and receive EOF before closing, there are no + drawbacks, and some servers need it to work correctly. + +Alexander Lamaison (26 Feb 2010) +- Style improvements to knownhost error handling. + + Made improvements as suggested by Peter Stuge: http://www.libssh2.org/mail/libssh2-devel-archive-2010-02/0161.shtml. + +- Call libssh2_error for every knownhost API failure. + + The libssh2 API calls should set the last error code and a message when + returning a failure by calling libssh2_error. This changeset adds these + calls to the libssh2_knownhost_* API as well as libssh2_base64_decode. + + This change also makes libssh2_error into a function rather than a macro. + Its implementation is moved to misc.c. This function returns the error + code passed to it allowing callers to return the error value directly + without duplicating the error code. + +- Fix LIBSSH2_ALLOC checks. + + These appear to be cut-and-paste errors where the wrong variable is checked + for NULLness after calling LIBSSH2_ALLOC. + +Simon Josefsson (23 Feb 2010) +- Silence compiler warning. + +- Make it portable; test uses = for string comparison (not ==). Indent. + +Alexander Lamaison (22 Feb 2010) +- libssh2_knownhost_del: fix write to freed memory. + + When removing a known host, libssh2_knownhost_del would remove the node from the linked list, free its memory and then overwrite the struct parameter (which indicated which node to remove) with 0. However, this struct is actually allocated within the just-freed node meaning we're writing to freed memory. This made Windows very upset. + + The fix is simply to overwrite the struct first before freeing the memory. 
+ +Daniel Stenberg (21 Feb 2010) +- show more verbose error when SCP send fails + +- libssh2_socket_t is done, a library-free function is needed + +- clarify that this frees all data associated with a session + +- improved error handling + +- add missing libssh2_error() calls + + To make sure the public API is functional and that the + BLOCK_ADJUST_ERRNO() macro works correctly we MUST make sure to + call libssh2_error() when we return errors. + +- fix memory leak in userauth_keyboard_interactive() + + Mr anonymous in bug #125 pointed out that the userauth_keyboard_interactive() + function does in fact assign the same pointer a second time to a new allocated + buffer without properly freeing the previous one, which caused a memory leak. + +- added missing error codes + + To allow the libssh2_session_last_error() function to work as + documented, userauth_password() now better makes sure to call + libssh2_error() everywhere before it returns error. + + Pointed out by mr anonymous in bug #128 + +Peter Stuge (16 Feb 2010) +- Fix resource and memory leaks in examples as reported by cppcheck + + Thanks to Ettl Martin for the report and patch. This fixes #132 + +Daniel Stenberg (15 Feb 2010) +- mention the new man pages for macros + +- added man pages for API macros + + all #defined macros in the public headers are considered to be part + of the API and I've generated individual man pages for each of them + to A) make it easier to figure out what each function/macro actually + is for so that automated lookups work better and for B) make sure we + have all public functions document (both macros and functions) to + make it easier for us to work away from all the macros in a future + release. + +- Committed the patch by Yoichi Iwaki in bug #2929647 + + Committed the patch by Yoichi Iwaki in bug #2929647, which fixed a memory + leak when an 'outbuf' was still allocated when a session was freed. 
+ +- free "outbuf" when killing a session + + Fix memory leak: if there was an "outbuf" still allocated when a + session was torn down it needs to be freed in session_free() + + Patch by Yoichi Iwaki in bug #2929647 + +- the working version name is now 1.2.5_DEV + +Version 1.2.4 (13 Feb 2010) + +Daniel Stenberg (13 Feb 2010) +- updated info for 1.2.4 + +Dan Fandrich (10 Feb 2010) +- Allow compiling with OpenSSL when AES isn't available. + +Peter Stuge (9 Feb 2010) +- [Dave McCaldon brought this change] + + Fix Tru64 socklen_t compile issue with example/direct_tcpip.c + + Building libssh2-1.2.3 on Tru64 fails at line 48 and 166 because socklen_t + isn't defined on Tru64 unless _POSIX_PII_SOCKET is defined. + + This patch updates configure.ac to add -D_POSIX_PII_SOCKET when building + on Tru64 platform(s). + +- [Dave McCaldon brought this change] + + Resolve compile issues on Solaris x64 and UltraSPARC + + Solaris builds of libssh2-1.2.3 failed on both x64 and UltraSPARC + platforms because of two problems: + + 1) src/agent.c:145 sun is a reserved word when using the SUNWspro compiler + 2) example/direct_tcpip.c:84 INADDR_NONE is not defined + +Daniel Stenberg (3 Feb 2010) +- towards 1.2.4 now + +Version 1.2.3 (3 Feb 2010) + +Daniel Stenberg (3 Feb 2010) +- Version 1.2.3 (February 3, 2010) + +- fix building out of source tree by providing a better include path + + when building out of source tree, we provide -I$(top_builddir)/example + since the libssh2_config.h gets generated in that dir + +Peter Stuge (1 Feb 2010) +- [Sofian Brabez brought this change] + + Replace : in hexdump with " " (two spaces) + +- Detect when the forwarded connection is closed in example/direct_tcpip.c + +- Fix example/direct_tcpip.c to work also on WIN32 + + read() and write() are no good for WIN32 sockets, use recv() and send(). 
+ +- Ignore libssh2_config.h.in and stamp-h2 in example/ and remove .cvsignore + +- Simplify WIN32 ifdefs in example/direct_tcpip.c to allow standalone compile + +- Always #define INVALID_SOCKET -1 in libssh2_priv.h when not on win32 + + Fix broken builds since commit abd9bd0bbe631efeada1f54552c70b54e1c490c1 + for all non-win32 platforms. + +- Include hmac-md5 and hmac-md5-96 only if crypto backend supports MD5 + +- Use LIBSSH2_HOSTKEY_HASH_SHA1 instead of _MD5 in examples and tests + + MD5 support is optional and may not always be available, while SHA1 is both + required and recommended. + +- Update mailing list address in configure.ac to @cool.haxx.se + +- Make example/direct_tcpip.c compile for win32 + + One warning from FD_SET() remains, it is also in some other examples. + +- Correctly check for an invalid socket in session_startup() + +- Small documentation fix after Dave's _USERAUTH_FAILURE improvement + +- [Dave McCaldon brought this change] + + Handle SSH_MSG_USERAUTH_FAILURE for password and kbd-int authentication + + Neither libssh2_userauth_password_ex() nor + libssh2_userauth_keyboard_interactive_ex() would return a login failure + error if the server responded with a SSH_MSG_USERAUTH_FAILURE, instead + you would see whatever previous error had occurred, typically + LIBSSH2_ERROR_EAGAIN. + + This patch changes error code -18 to LIBSSH2_ERROR_AUTHENTICATION_FAILED + and makes LIBSSH2_ERROR_PUBLICKEY_UNRECOGNIZED an alias for + LIBSSH2_ERROR_AUTHENTICATION_FAILED. In addition, new logic in + userauth_password() properly handles SSH_MSG_USERAUTH_FAILURE and both + this function and userauth_keyboard_interactive() now properly return + LIBSSH2_ERROR_AUTHENTICATION_FAILED. + +Simon Josefsson (28 Jan 2010) +- Fix. + +- Also deal with GLOBAL_REQUEST keep-alives. + +- Make OpenSSH-style keepalive work against libssh2 clients. 
+ +Daniel Stenberg (27 Jan 2010) +- clarified + +Peter Stuge (26 Jan 2010) +- [Dave McCaldon brought this change] + + Fix trace context lookup in libssh2_debug() + + The trace context is actually a bitmask so that tracing output can be + controlled by setting a bitmask using libssh2_trace(). However, the logic + in libssh2_debug() that converted the context to a string was using the + context value as an array index. Because the code used a bounds check on + the array, there was never a danger of a crash, but you would certainly + either get the wrong string, or "unknown". + + This patch adds a lookup that iterates over the context strings and uses + it's index to check for the corresponding bit in the context. + +- Fix typo in RELEASE-NOTES + +Daniel Stenberg (20 Jan 2010) +- updated for 1.2.3 with all the stuff I found in the log + +- ignore more generated files + +- [Dave McCaldon brought this change] + + Pass user context through libssh2_trace_sethandler() to callback + + The libssh2_trace_sethandler() call allows the user to handle the output of libssh2 rather than having it written to stderr. This patch updates libssh2_trace_sethandler() to allow a user-defined void* context value to be passed back to the output handler. + +- [Dave McCaldon brought this change] + + Add libssh2_trace_sethandler() to the API (even more) + +- [Dave McCaldon brought this change] + + Add libssh2_trace_sethandler() to the API + +- cleanup includes + + We now produce a local libssh2_config.h file in this dir for the + examples to use so I cleaned up the include path at the same time. + +- generate a libssh2_config.h in the example dir + + buildconf copies the template to example/ and configure makes sure + to generate a proper file from it and the direct_tcpip.c example + is the first one to use it - to make sure it builds fine on more + paltforms + +Simon Josefsson (13 Jan 2010) +- Remove redundant #includes and reorder sys/types.h include. 
+ +Daniel Stenberg (10 Jan 2010) +- avoid a free(NULL) + +Simon Josefsson (7 Jan 2010) +- Make it simpler to get more debug info. + +Daiki Ueno (1 Jan 2010) +- Simplify the commit 63457dfa using type cast from size_t * to ulong *. + +Alexander Lamaison (30 Dec 2009) +- Fixed memory leak in userauth_publickey(). + + userauth_publickey_fromfile() reads the key from a + file using file_read_publickey() which returns two + allocated strings, the decoded key and the key + method (such as "ssh-dss"). The latter can be + derived from the former but returning both avoids a + later allocation while doing so. + + Older versions of userauth_publickey_fromfile() used + this method string directly but when + userauth_publickey() was factored out of + userauth_publickey_fromfile() it derived the method + from the key itself. This resulted in the method + being allocated twice. + + This fix, which maintains the optimisation that + avoids an extra allocation, changes + userauth_publickey() so it doesn't allocate and + derive the method when userauth_pblc_method already + has a value. + + Signed-off-by: Alexander Lamaison + +Daiki Ueno (25 Dec 2009) +- Fix the return value description of libssh2_knownhost_free(). + +- Fix compiler warnings for size_t pointers on 32-bit Windows. + +- Define INVALID_SOCKET and use it instead of SOCKET_BAD. + + Revert the part of previous commit that defines SOCKET_BAD library wide. + +- Use libssh2_socket_t in the ssh-agent stuff. + Define a portability macro SOCKET_BAD which means "invalid socket". + +- Mark/unmark connection to Pageant is open/close. + +- Add test to check if the socket is connected. + +Peter Stuge (24 Dec 2009) +- Add libssh2.pc to top-level .gitignore + +- Fix publickey authentication regression + + Commit 70b199f47659a74b8778c528beccf893843e5ecb introduced a parsing + bug in file_read_publickey() which made the algorithm name contain an + extra trailing space character, breaking all publickey authentication. 
+ +- Add a direct-tcpip example which shows local port forwarding + +- Add session parameter and LIBSSH2_TRACE_SOCKET to libssh2_trace(3) man page + +- Add TODO: Expose error messages sent by the server + +Daiki Ueno (23 Dec 2009) +- Fix doc comments. + +- Add man pages for ssh-agent API. + +- Don't request userauthlist after authentication. + +Simon Josefsson (21 Dec 2009) +- Add. + +- [Daiki Ueno brought this change] + + Add an example to use ssh-agent API. + + Signed-off-by: Simon Josefsson + +- [Daiki Ueno brought this change] + + Add ssh-agent API. + + Signed-off-by: Simon Josefsson + +- [Daiki Ueno brought this change] + + Add callback-based API for publickey auth. + + Signed-off-by: Simon Josefsson + +- Move examples from example/simple to example/. + +- Move examples from example/simple to example/. + +Daniel Stenberg (17 Dec 2009) +- _libssh2_list_insert() fixed to work + + While this is code not currently in use, it is part of the generic linked + list code and since I found the error I thought I'd better fix it since we + might bring in this function into the code one day. + +Simon Josefsson (16 Dec 2009) +- Silence compiler warnings. + + Based on patch by Kamil Dudka in + . + +- [Kamil Dudka brought this change] + + libgcrypt: simplify code of _libssh2_dsa_sha1_sign + + Signed-off-by: Simon Josefsson + +- [Kamil Dudka brought this change] + + libgcrypt: follow-up for ssh-dss padding fix + + Signed-off-by: Simon Josefsson + +Dan Fandrich (15 Dec 2009) +- Check for the right environment variable in the test app + +Simon Josefsson (14 Dec 2009) +- Silence warning about unused function parameter. + + Reported by Steven Van Ingelgem . + +Daniel Stenberg (10 Dec 2009) +- avoid returning data to memory already freed + + In case of failure we must make sure that the data we return + doesn't point to a memory area already freed. Reported anonymously + in the bug report #2910103. 
+ +Peter Stuge (8 Dec 2009) +- Use LIBSSH2_TRACE_* internally and remove redundant LIBSSH2_DBG_* + +- Add LIBSSH2_TRACE_SOCKET context for tracing send() and recv() + + Helpful in debugging the -39 errors. + +- Another transport layer fix for bogus -39 (LIBSSH2_ERROR_BAD_USE) errors + + Commit 683aa0f6b52fb1014873c961709102b5006372fc made send_existing() send + more than just the second part of a packet when the kernel did not accept + the full packet, but the function still overlooked the SSH protocol + overhead in each packet, often 48 bytes. + + If only the last few bytes of a packet remained, then the packet would + erroneously be considered completely sent, and the next call to write + more data in the session would return a -39 error. + +Daniel Stenberg (6 Dec 2009) +- move local variable to be more localized + +- fixed some indent mistakes + +Peter Stuge (6 Dec 2009) +- Fix padding in ssh-dss signature blob encoding + + DSA signatures consist of two 160-bit integers called r and s. In ssh-dss + signature blobs r and s are stored directly after each other in binary + representation, making up a 320-bit (40 byte) string. (See RFC4253 p14.) + + The crypto wrappers in libssh2 would either pack r and s incorrectly, or + fail, when at least one integer was small enough to be stored in 19 bytes + or less. + + The patch ensures that r and s are always stored as two 160 bit numbers. + +- Don't always clear write direction blocking flag + + When libssh2_transport_write() is called to continue sending a + partially sent packet the write direction flag must not be cleared + until the previous packet has been completely sent, or the app would + hang if the packet still isn't sent completely, since select() gets + called by the internal blocking emulation layer in libssh2 but would + then not be watching the socket for writability. + + Clear the flag only once processing of previous packet data is + complete and a new packet is about to be prepared. 
+ +Alexander Lamaison (24 Nov 2009) +- Detabify. + +- [Daniel Stenberg brought this change] + + Fixed memory leak in sftp_fstat(). + +Simon Josefsson (17 Nov 2009) +- Mark date of 1.2.2 release. + +- Merge branch 'master' of ssh://git.stuge.se/var/lib/git/libssh2 + +Version 1.2.2 (16 Nov 2009) + +Daniel Stenberg (16 Nov 2009) +- prepared for 1.2.2 + +Simon Josefsson (16 Nov 2009) +- Improve NEWS items. + +- Support AES-Counter ciphers. + +- Silence compiler warning. + + Reported by Steven Van Ingelgem + in . + +- Mention libssh2-style.el. + +- Use memmove instead of memcpy on overlapping memory areas. + + Reported by Bob Alexander in + . + +- Add. + +- Protect against crash on too small SSH_MSG_IGNORE packets. + + Reported by Bob Alexander + in . + +- add copyright line + +- support arcfour128 cipher per RFC 4345 + +Daniel Stenberg (21 Oct 2009) +- [Cristian Rodríguez brought this change] + + add support for GCC visibility features + +Simon Josefsson (19 Oct 2009) +- less hard coding of cipher mode in libgcrypt backend + +Daniel Stenberg (18 Oct 2009) +- [Juzna brought this change] + + libssh2_channel_forward_accept() and listening fixes + + The forward accepting was not done right before, and the + packet_queue_listener function didn't assign a necessary + variable. All fixed by Juzna. I (Daniel) modified the + forward_accept() change somewhat. + +- added man page for libssh2_knownhost_free + +- more CVS => DEV conversions + +- remove references to "CVS" + +- we are on the 1.2.2 track nowadays + +Peter Stuge (17 Oct 2009) +- Ensure that win32/libssh2.dsp will be generated with CRLF line endings + + First, win32/msvcproj.{head,foot} are now committed with CRLF line endings, + and .gitattributes specifies that these should not be changed on checkout or + commit. These are win32 files so it makes sense to store them with native + line endings. 
+ + Second, the rules for generating libssh2.dsp and libssh2.vcproj are changed + so that the full file contents passes through awk, which strips all CR and + then prints each line with one CRLF line ending. Stripping CR is important + to avoid CRCRLF in case the input already comes with CRLF. + +Dan Fandrich (29 Sep 2009) +- Make sure permissions on the private host key file is tight enough that + sshd doesn't complain. Quote $srcdir to try to cope with embedded spaces. + +Peter Stuge (30 Sep 2009) +- Clarify the scp_write examples slightly and use an octal mask for the mode + +Version 1.2.1 (29 Sep 2009) + +Daniel Stenberg (29 Sep 2009) +- 1.2.1 preparations + +- remove ChangeLog and win32/libssh2.dsp only at make distclean + +Simon Josefsson (23 Sep 2009) +- Fix shell usage. + +Daniel Stenberg (22 Sep 2009) +- clarify that the paths are the remote ones + +- let hpux systems build with _REENTRANT defined as well + +- updated to match reality + + I went over the commit log and added what I believe is all notable + changes and bugfixes since the 1.2 release + +Peter Stuge (20 Sep 2009) +- Fix scp examples to loop correctly over libssh2_channel_write() + +- Fix transport layer bug causing invalid -39 (LIBSSH2_ERROR_BAD_USE) errors + + The channel layer sends packets using the transport layer, possibly + calling _libssh2_transport_write() many times for each packet. + + The transport layer uses the send_existing() helper to send out any + remaining parts of previous packets before a new packet is started. + + The bug made send_existing() consider the entire packet sent as soon as it + successfully sent the second part of a packet, even if the packet was not + completely done yet. + +Daniel Stenberg (19 Sep 2009) +- [Neil Gierman brought this change] + + Added gettimeofday() function for win32 + + Neil Gierman's patch adds a gettimeofday() function for win32 + for the libssh2_trace() functionality. 
The code originates from + cygwin and was put in the public domain by the author + Danny Smith + +- libssh2_channel_read_ex() must return 0 when closed + + If the channel is already at EOF or even closed at the end of the + libssh2_channel_read_ex() function and there's no data to return, + we need to signal that back. We may have gotten that info while + draining the incoming transport layer until EAGAIN so we must not + be fooled by that return code. + +Dan Fandrich (16 Sep 2009) +- Ignore tags file + +- Some platforms need sys/socket.h included before netinet/in.h. + Fixed an unused variable compiler warning. + +Daniel Stenberg (16 Sep 2009) +- libssh2_channel_wait_closed() could hang + + libssh2_channel_wait_closed() had a bad loop waiting for the + channel to close, as it could easily miss the info and then if + the socket would be silent from that moment the funtion would + hang if in blocking-mode or just return EAGAIN wrongly to the + app. The drain-transport loop now correctly checks if the close + has arrived. + +- make libssh2_scp_send/recv do blocking mode correctly + + Somehow I had completely missed to make the libssh2_scp_send/recv + functions support the blocking mode the correct way so when I + cleaned up things the other day blocking mode broke for them... + Fixed now. + +Guenter Knauf (8 Sep 2009) +- changed NetWare test makefile static link order + +- removed conditional sources from Makefile.inc + added ifdef blocks to libgcrypt.c, pem.c, openssl.c + +Simon Josefsson (7 Sep 2009) +- Fix compilation errors. + +Guenter Knauf (6 Sep 2009) +- some more NetWare build fixes. + +- some more NetWare makefile fixes. + +- added sys/select.h include to samples for fd_set on NetWare. + +- accept CFLAGS from environment. 
diff --git a/vendor/libssh2/NMakefile b/vendor/libssh2/NMakefile new file mode 100644 index 000000000..07bc2ddad --- /dev/null +++ b/vendor/libssh2/NMakefile @@ -0,0 +1,33 @@ +!include "win32/config.mk" + +!if "$(WITH_WINCNG)" == "1" +!include "Makefile.WinCNG.inc" +!else +!include "Makefile.OpenSSL.inc" +!endif +!include "Makefile.inc" + +OBJECTS=$(CSOURCES:.c=.obj) + +# SUBDIRS=src example +SUBDIRS=src + +all-sub: win32\objects.mk + -for %D in ($(SUBDIRS)) do $(MAKE) /nologo /f %D/NMakefile BUILD=$(BUILD) SUBDIR=%D all-sub + +clean: + -rmdir 2>NUL /s/q $(TARGET) + -del 2>NUL win32\objects.mk + +real-clean vclean: clean + -del 2>NUL libssh2.dll + -del 2>NUL libssh2.exp + -del 2>NUL libssh2.ilk + -del 2>NUL libssh2.lib + -del 2>NUL *.pdb + +win32\objects.mk: Makefile.inc + @echo OBJECTS = \>$@ + @for %O in ($(OBJECTS)) do @echo $$(INTDIR)\%O \>>$@ + @echo $$(EOL)>>$@ + diff --git a/vendor/libssh2/README b/vendor/libssh2/README new file mode 100644 index 000000000..39abc202e --- /dev/null +++ b/vendor/libssh2/README @@ -0,0 +1,19 @@ +libssh2 - SSH2 library +====================== + +libssh2 is a library implementing the SSH2 protocol, available under +the revised BSD license. 
+ +Web site: http://www.libssh2.org/ + +Mailing list: http://cool.haxx.se/mailman/listinfo/libssh2-devel + +License: see COPYING + +Source code: https://github.com/libssh2/libssh2 + +Web site source code: https://github.com/libssh2/www + +Installation instructions are in: + - docs/INSTALL_CMAKE for CMake + - docs/INSTALL_AUTOTOOLS for Autotools diff --git a/vendor/libssh2/RELEASE-NOTES b/vendor/libssh2/RELEASE-NOTES new file mode 100644 index 000000000..2b22288ce --- /dev/null +++ b/vendor/libssh2/RELEASE-NOTES @@ -0,0 +1,56 @@ +libssh2 1.7.0 + +This release includes the following changes: + + o libssh2_session_set_last_error: Add function + o mac: Add support for HMAC-SHA-256 and HMAC-SHA-512 + o WinCNG: support for SHA256/512 HMAC + o kex: Added diffie-hellman-group-exchange-sha256 support + o OS/400 crypto library QC3 support + +This release includes the following security advisory: + + o diffie_hellman_sha256: convert bytes to bits + CVE-2016-0787: http://www.libssh2.org/adv_20160223.html + +This release includes the following bugfixes: + + o SFTP: Increase speed and datasize in SFTP read + o openssl: make libssh2_sha1 return error code + o openssl: fix memleak in _libssh2_dsa_sha1_verify() + o cmake: include CMake files in the release tarballs + o Fix builds with Visual Studio 2015 + o hostkey.c: Fix compiling error when OPENSSL_NO_MD5 is defined + o GNUmakefile: add support for LIBSSH2_LDFLAG_EXTRAS + o GNUmakefile: add -m64 CFLAGS when targeting mingw64 + o kex: free server host key before allocating it (again) + o SCP: add libssh2_scp_recv2 to support large (> 2GB) files on windows + o channel: Detect bad usage of libssh2_channel_process_startup + o userauth: Fix off by one error when reading public key file + o kex: removed dupe entry from libssh2_kex_methods + o _libssh2_error: Support allocating the error message + o hostkey: fix invalid memory access if libssh2_dsa_new fails + o hostkey: align code path of ssh_rsa_init to ssh_dss_init + o libssh2.pc.in: 
fix the output of pkg-config --libs + o wincng: fixed possible memory leak in _libssh2_wincng_hash + o wincng: fixed _libssh2_wincng_hash_final return value + o add OpenSSL 1.1.0-pre2 compatibility + o agent_disconnect_unix: unset the agent fd after closing it + o sftp: stop reading when buffer is full + o sftp: Send at least one read request before reading + o sftp: Don't return EAGAIN if data was written to buffer + o sftp: Check read packet file offset + o configure: build "silent" if possible + o openssl: add OpenSSL 1.1.0-pre3-dev compatibility + o GNUmakefile: list system libs after user libs + +This release would not have looked like this without help, code, reports and +advice from friends like these: + + Alexander Lamaison, Andreas Schneider, brian m. carlson, Daniel Stenberg, + David Byron, Jakob Egger, Kamil Dudka, Marc Hoersken, Mizunashi Mana, + Patrick Monnerat, Paul Howarth, Salvador Fandino, Salvador Fandiño, + Salvador Fandiño, Viktor Szakats, Will Cosgrove, + (16 contributors) + + Thanks! 
(and sorry if I forgot to mention someone) diff --git a/vendor/libssh2/acinclude.m4 b/vendor/libssh2/acinclude.m4 new file mode 100644 index 000000000..71860d618 --- /dev/null +++ b/vendor/libssh2/acinclude.m4 @@ -0,0 +1,384 @@ + +dnl ********************************************************************** +dnl CURL_DETECT_ICC ([ACTION-IF-YES]) +dnl +dnl check if this is the Intel ICC compiler, and if so run the ACTION-IF-YES +dnl sets the $ICC variable to "yes" or "no" +dnl ********************************************************************** +AC_DEFUN([CURL_DETECT_ICC], +[ + ICC="no" + AC_MSG_CHECKING([for icc in use]) + if test "$GCC" = "yes"; then + dnl check if this is icc acting as gcc in disguise + AC_EGREP_CPP([^__INTEL_COMPILER], [__INTEL_COMPILER], + dnl action if the text is found, this it has not been replaced by the + dnl cpp + ICC="no", + dnl the text was not found, it was replaced by the cpp + ICC="yes" + AC_MSG_RESULT([yes]) + [$1] + ) + fi + if test "$ICC" = "no"; then + # this is not ICC + AC_MSG_RESULT([no]) + fi +]) + +dnl We create a function for detecting which compiler we use and then set as +dnl pendantic compiler options as possible for that particular compiler. The +dnl options are only used for debug-builds. + +AC_DEFUN([CURL_CC_DEBUG_OPTS], +[ + if test "z$ICC" = "z"; then + CURL_DETECT_ICC + fi + + if test "$GCC" = "yes"; then + + dnl figure out gcc version! + AC_MSG_CHECKING([gcc version]) + gccver=`$CC -dumpversion` + num1=`echo $gccver | cut -d . -f1` + num2=`echo $gccver | cut -d . -f2` + gccnum=`(expr $num1 "*" 100 + $num2) 2>/dev/null` + AC_MSG_RESULT($gccver) + + if test "$ICC" = "yes"; then + dnl this is icc, not gcc. 
+ + dnl ICC warnings we ignore: + dnl * 269 warns on our "%Od" printf formatters for curl_off_t output: + dnl "invalid format string conversion" + dnl * 279 warns on static conditions in while expressions + dnl * 981 warns on "operands are evaluated in unspecified order" + dnl * 1418 "external definition with no prior declaration" + dnl * 1419 warns on "external declaration in primary source file" + dnl which we know and do on purpose. + + WARN="-wd279,269,981,1418,1419" + + if test "$gccnum" -gt "600"; then + dnl icc 6.0 and older doesn't have the -Wall flag + WARN="-Wall $WARN" + fi + else dnl $ICC = yes + dnl this is a set of options we believe *ALL* gcc versions support: + WARN="-W -Wall -Wwrite-strings -pedantic -Wpointer-arith -Wnested-externs -Winline -Wmissing-prototypes" + + dnl -Wcast-align is a bit too annoying on all gcc versions ;-) + + if test "$gccnum" -ge "207"; then + dnl gcc 2.7 or later + WARN="$WARN -Wmissing-declarations" + fi + + if test "$gccnum" -gt "295"; then + dnl only if the compiler is newer than 2.95 since we got lots of + dnl "`_POSIX_C_SOURCE' is not defined" in system headers with + dnl gcc 2.95.4 on FreeBSD 4.9! + WARN="$WARN -Wundef -Wno-long-long -Wsign-compare" + fi + + if test "$gccnum" -ge "296"; then + dnl gcc 2.96 or later + WARN="$WARN -Wfloat-equal" + fi + + if test "$gccnum" -gt "296"; then + dnl this option does not exist in 2.96 + WARN="$WARN -Wno-format-nonliteral" + fi + + dnl -Wunreachable-code seems totally unreliable on my gcc 3.3.2 on + dnl on i686-Linux as it gives us heaps with false positives. + dnl Also, on gcc 4.0.X it is totally unbearable and complains all + dnl over making it unusable for generic purposes. Let's not use it. 
+ + if test "$gccnum" -ge "303"; then + dnl gcc 3.3 and later + WARN="$WARN -Wendif-labels -Wstrict-prototypes" + fi + + if test "$gccnum" -ge "304"; then + # try these on gcc 3.4 + WARN="$WARN -Wdeclaration-after-statement" + fi + + for flag in $CPPFLAGS; do + case "$flag" in + -I*) + dnl Include path, provide a -isystem option for the same dir + dnl to prevent warnings in those dirs. The -isystem was not very + dnl reliable on earlier gcc versions. + add=`echo $flag | sed 's/^-I/-isystem /g'` + WARN="$WARN $add" + ;; + esac + done + + fi dnl $ICC = no + + CFLAGS="$CFLAGS $WARN" + + AC_MSG_NOTICE([Added this set of compiler options: $WARN]) + + else dnl $GCC = yes + + AC_MSG_NOTICE([Added no extra compiler options]) + + fi dnl $GCC = yes + + dnl strip off optimizer flags + NEWFLAGS="" + for flag in $CFLAGS; do + case "$flag" in + -O*) + dnl echo "cut off $flag" + ;; + *) + NEWFLAGS="$NEWFLAGS $flag" + ;; + esac + done + CFLAGS=$NEWFLAGS + +]) dnl end of AC_DEFUN() + +dnl CURL_CHECK_NONBLOCKING_SOCKET +dnl ------------------------------------------------- +dnl Check for how to set a socket to non-blocking state. There seems to exist +dnl four known different ways, with the one used almost everywhere being POSIX +dnl and XPG3, while the other different ways for different systems (old BSD, +dnl Windows and Amiga). +dnl +dnl There are two known platforms (AIX 3.x and SunOS 4.1.x) where the +dnl O_NONBLOCK define is found but does not work. This condition is attempted +dnl to get caught in this script by using an excessive number of #ifdefs... 
+dnl +AC_DEFUN([CURL_CHECK_NONBLOCKING_SOCKET], +[ + AC_MSG_CHECKING([non-blocking sockets style]) + + AC_TRY_COMPILE([ +/* headers for O_NONBLOCK test */ +#include +#include +#include +],[ +/* try to compile O_NONBLOCK */ + +#if defined(sun) || defined(__sun__) || defined(__SUNPRO_C) || defined(__SUNPRO_CC) +# if defined(__SVR4) || defined(__srv4__) +# define PLATFORM_SOLARIS +# else +# define PLATFORM_SUNOS4 +# endif +#endif +#if (defined(_AIX) || defined(__xlC__)) && !defined(_AIX41) +# define PLATFORM_AIX_V3 +#endif + +#if defined(PLATFORM_SUNOS4) || defined(PLATFORM_AIX_V3) || defined(__BEOS__) +#error "O_NONBLOCK does not work on this platform" +#endif + int socket; + int flags = fcntl(socket, F_SETFL, flags | O_NONBLOCK); +],[ +dnl the O_NONBLOCK test was fine +nonblock="O_NONBLOCK" +AC_DEFINE(HAVE_O_NONBLOCK, 1, [use O_NONBLOCK for non-blocking sockets]) +],[ +dnl the code was bad, try a different program now, test 2 + + AC_TRY_COMPILE([ +/* headers for FIONBIO test */ +#include +#include +],[ +/* FIONBIO source test (old-style unix) */ + int socket; + int flags = ioctl(socket, FIONBIO, &flags); +],[ +dnl FIONBIO test was good +nonblock="FIONBIO" +AC_DEFINE(HAVE_FIONBIO, 1, [use FIONBIO for non-blocking sockets]) +],[ +dnl FIONBIO test was also bad +dnl the code was bad, try a different program now, test 3 + + AC_TRY_COMPILE([ +/* headers for ioctlsocket test (Windows) */ +#undef inline +#ifdef HAVE_WINDOWS_H +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN +#endif +#include +#ifdef HAVE_WINSOCK2_H +#include +#else +#ifdef HAVE_WINSOCK_H +#include +#endif +#endif +#endif +],[ +/* ioctlsocket source code */ + SOCKET sd; + unsigned long flags = 0; + sd = socket(0, 0, 0); + ioctlsocket(sd, FIONBIO, &flags); +],[ +dnl ioctlsocket test was good +nonblock="ioctlsocket" +AC_DEFINE(HAVE_IOCTLSOCKET, 1, [use ioctlsocket() for non-blocking sockets]) +],[ +dnl ioctlsocket didnt compile!, go to test 4 + + AC_TRY_LINK([ +/* headers for IoctlSocket test (Amiga?) 
*/ +#include +],[ +/* IoctlSocket source code */ + int socket; + int flags = IoctlSocket(socket, FIONBIO, (long)1); +],[ +dnl ioctlsocket test was good +nonblock="IoctlSocket" +AC_DEFINE(HAVE_IOCTLSOCKET_CASE, 1, [use Ioctlsocket() for non-blocking sockets]) +],[ +dnl Ioctlsocket didnt compile, do test 5! + AC_TRY_COMPILE([ +/* headers for SO_NONBLOCK test (BeOS) */ +#include +],[ +/* SO_NONBLOCK source code */ + long b = 1; + int socket; + int flags = setsockopt(socket, SOL_SOCKET, SO_NONBLOCK, &b, sizeof(b)); +],[ +dnl the SO_NONBLOCK test was good +nonblock="SO_NONBLOCK" +AC_DEFINE(HAVE_SO_NONBLOCK, 1, [use SO_NONBLOCK for non-blocking sockets]) +],[ +dnl test 5 didnt compile! +nonblock="nada" +AC_DEFINE(HAVE_DISABLED_NONBLOCKING, 1, [disabled non-blocking sockets]) +]) +dnl end of fifth test + +]) +dnl end of forth test + +]) +dnl end of third test + +]) +dnl end of second test + +]) +dnl end of non-blocking try-compile test + AC_MSG_RESULT($nonblock) + + if test "$nonblock" = "nada"; then + AC_MSG_WARN([non-block sockets disabled]) + fi +]) + +dnl CURL_CHECK_NEED_REENTRANT_SYSTEM +dnl ------------------------------------------------- +dnl Checks if the preprocessor _REENTRANT definition +dnl must be unconditionally done for this platform. +dnl Internal macro for CURL_CONFIGURE_REENTRANT. + +AC_DEFUN([CURL_CHECK_NEED_REENTRANT_SYSTEM], [ + case $host in + *-*-solaris* | *-*-hpux*) + tmp_need_reentrant="yes" + ;; + *) + tmp_need_reentrant="no" + ;; + esac +]) + + +dnl CURL_CONFIGURE_FROM_NOW_ON_WITH_REENTRANT +dnl ------------------------------------------------- +dnl This macro ensures that configuration tests done +dnl after this will execute with preprocessor symbol +dnl _REENTRANT defined. This macro also ensures that +dnl the generated config file defines NEED_REENTRANT +dnl and that in turn setup.h will define _REENTRANT. +dnl Internal macro for CURL_CONFIGURE_REENTRANT. 
+ +AC_DEFUN([CURL_CONFIGURE_FROM_NOW_ON_WITH_REENTRANT], [ +AC_DEFINE(NEED_REENTRANT, 1, + [Define to 1 if _REENTRANT preprocessor symbol must be defined.]) +cat >>confdefs.h <<_EOF +#ifndef _REENTRANT +# define _REENTRANT +#endif +_EOF +]) + + +dnl CURL_CONFIGURE_REENTRANT +dnl ------------------------------------------------- +dnl This first checks if the preprocessor _REENTRANT +dnl symbol is already defined. If it isn't currently +dnl defined a set of checks are performed to verify +dnl if its definition is required to make visible to +dnl the compiler a set of *_r functions. Finally, if +dnl _REENTRANT is already defined or needed it takes +dnl care of making adjustments necessary to ensure +dnl that it is defined equally for further configure +dnl tests and generated config file. + +AC_DEFUN([CURL_CONFIGURE_REENTRANT], [ + AC_PREREQ([2.50])dnl + # + AC_MSG_CHECKING([if _REENTRANT is already defined]) + AC_COMPILE_IFELSE([ + AC_LANG_PROGRAM([[ + ]],[[ +#ifdef _REENTRANT + int dummy=1; +#else + force compilation error +#endif + ]]) + ],[ + AC_MSG_RESULT([yes]) + tmp_reentrant_initially_defined="yes" + ],[ + AC_MSG_RESULT([no]) + tmp_reentrant_initially_defined="no" + ]) + # + if test "$tmp_reentrant_initially_defined" = "no"; then + AC_MSG_CHECKING([if _REENTRANT is actually needed]) + CURL_CHECK_NEED_REENTRANT_SYSTEM + + if test "$tmp_need_reentrant" = "yes"; then + AC_MSG_RESULT([yes]) + else + AC_MSG_RESULT([no]) + fi + fi + # + AC_MSG_CHECKING([if _REENTRANT is onwards defined]) + if test "$tmp_reentrant_initially_defined" = "yes" || + test "$tmp_need_reentrant" = "yes"; then + CURL_CONFIGURE_FROM_NOW_ON_WITH_REENTRANT + AC_MSG_RESULT([yes]) + else + AC_MSG_RESULT([no]) + fi + # +]) + diff --git a/vendor/libssh2/aclocal.m4 b/vendor/libssh2/aclocal.m4 new file mode 100644 index 000000000..41ad8c694 --- /dev/null +++ b/vendor/libssh2/aclocal.m4 @@ -0,0 +1,1198 @@ +# generated automatically by aclocal 1.15 -*- Autoconf -*- + +# Copyright (C) 1996-2014 Free 
Software Foundation, Inc. + +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])]) +m4_ifndef([AC_AUTOCONF_VERSION], + [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl +m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.69],, +[m4_warning([this file was generated for autoconf 2.69. +You have another version of autoconf. It may work, but is not guaranteed to. +If you have problems, you may need to regenerate the build system entirely. +To do so, use the procedure documented by the package, typically 'autoreconf'.])]) + +# Copyright (C) 2002-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_AUTOMAKE_VERSION(VERSION) +# ---------------------------- +# Automake X.Y traces this macro to ensure aclocal.m4 has been +# generated from the m4 files accompanying Automake X.Y. +# (This private macro should not be called outside this file.) +AC_DEFUN([AM_AUTOMAKE_VERSION], +[am__api_version='1.15' +dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to +dnl require some minimum version. Point them to the right macro. +m4_if([$1], [1.15], [], + [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl +]) + +# _AM_AUTOCONF_VERSION(VERSION) +# ----------------------------- +# aclocal traces this macro to find the Autoconf version. +# This is a private macro too. 
Using m4_define simplifies +# the logic in aclocal, which can simply ignore this definition. +m4_define([_AM_AUTOCONF_VERSION], []) + +# AM_SET_CURRENT_AUTOMAKE_VERSION +# ------------------------------- +# Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. +# This function is AC_REQUIREd by AM_INIT_AUTOMAKE. +AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], +[AM_AUTOMAKE_VERSION([1.15])dnl +m4_ifndef([AC_AUTOCONF_VERSION], + [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl +_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) + +# AM_AUX_DIR_EXPAND -*- Autoconf -*- + +# Copyright (C) 2001-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets +# $ac_aux_dir to '$srcdir/foo'. In other projects, it is set to +# '$srcdir', '$srcdir/..', or '$srcdir/../..'. +# +# Of course, Automake must honor this variable whenever it calls a +# tool from the auxiliary directory. The problem is that $srcdir (and +# therefore $ac_aux_dir as well) can be either absolute or relative, +# depending on how configure is run. This is pretty annoying, since +# it makes $ac_aux_dir quite unusable in subdirectories: in the top +# source directory, any form will work fine, but in subdirectories a +# relative path needs to be adjusted first. +# +# $ac_aux_dir/missing +# fails when called from a subdirectory if $ac_aux_dir is relative +# $top_srcdir/$ac_aux_dir/missing +# fails if $ac_aux_dir is absolute, +# fails when called from a subdirectory in a VPATH build with +# a relative $ac_aux_dir +# +# The reason of the latter failure is that $top_srcdir and $ac_aux_dir +# are both prefixed by $srcdir. 
In an in-source build this is usually +# harmless because $srcdir is '.', but things will broke when you +# start a VPATH build or use an absolute $srcdir. +# +# So we could use something similar to $top_srcdir/$ac_aux_dir/missing, +# iff we strip the leading $srcdir from $ac_aux_dir. That would be: +# am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` +# and then we would define $MISSING as +# MISSING="\${SHELL} $am_aux_dir/missing" +# This will work as long as MISSING is not called from configure, because +# unfortunately $(top_srcdir) has no meaning in configure. +# However there are other variables, like CC, which are often used in +# configure, and could therefore not use this "fixed" $ac_aux_dir. +# +# Another solution, used here, is to always expand $ac_aux_dir to an +# absolute PATH. The drawback is that using absolute paths prevent a +# configured tree to be moved without reconfiguration. + +AC_DEFUN([AM_AUX_DIR_EXPAND], +[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl +# Expand $ac_aux_dir to an absolute path. +am_aux_dir=`cd "$ac_aux_dir" && pwd` +]) + +# AM_CONDITIONAL -*- Autoconf -*- + +# Copyright (C) 1997-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_CONDITIONAL(NAME, SHELL-CONDITION) +# ------------------------------------- +# Define a conditional. 
+AC_DEFUN([AM_CONDITIONAL], +[AC_PREREQ([2.52])dnl + m4_if([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], + [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl +AC_SUBST([$1_TRUE])dnl +AC_SUBST([$1_FALSE])dnl +_AM_SUBST_NOTMAKE([$1_TRUE])dnl +_AM_SUBST_NOTMAKE([$1_FALSE])dnl +m4_define([_AM_COND_VALUE_$1], [$2])dnl +if $2; then + $1_TRUE= + $1_FALSE='#' +else + $1_TRUE='#' + $1_FALSE= +fi +AC_CONFIG_COMMANDS_PRE( +[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then + AC_MSG_ERROR([[conditional "$1" was never defined. +Usually this means the macro was only invoked conditionally.]]) +fi])]) + +# Copyright (C) 1999-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + + +# There are a few dirty hacks below to avoid letting 'AC_PROG_CC' be +# written in clear, in which case automake, when reading aclocal.m4, +# will think it sees a *use*, and therefore will trigger all it's +# C support machinery. Also note that it means that autoscan, seeing +# CC etc. in the Makefile, will ask for an AC_PROG_CC use... + + +# _AM_DEPENDENCIES(NAME) +# ---------------------- +# See how the compiler implements dependency checking. +# NAME is "CC", "CXX", "OBJC", "OBJCXX", "UPC", or "GJC". +# We try a few techniques and use that to set a single cache variable. +# +# We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was +# modified to invoke _AM_DEPENDENCIES(CC); we would have a circular +# dependency, and given that the user is not expected to run this macro, +# just rely on AC_PROG_CC. 
+AC_DEFUN([_AM_DEPENDENCIES], +[AC_REQUIRE([AM_SET_DEPDIR])dnl +AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl +AC_REQUIRE([AM_MAKE_INCLUDE])dnl +AC_REQUIRE([AM_DEP_TRACK])dnl + +m4_if([$1], [CC], [depcc="$CC" am_compiler_list=], + [$1], [CXX], [depcc="$CXX" am_compiler_list=], + [$1], [OBJC], [depcc="$OBJC" am_compiler_list='gcc3 gcc'], + [$1], [OBJCXX], [depcc="$OBJCXX" am_compiler_list='gcc3 gcc'], + [$1], [UPC], [depcc="$UPC" am_compiler_list=], + [$1], [GCJ], [depcc="$GCJ" am_compiler_list='gcc3 gcc'], + [depcc="$$1" am_compiler_list=]) + +AC_CACHE_CHECK([dependency style of $depcc], + [am_cv_$1_dependencies_compiler_type], +[if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then + # We make a subdir and do the tests there. Otherwise we can end up + # making bogus files that we don't know about and never remove. For + # instance it was reported that on HP-UX the gcc test will end up + # making a dummy file named 'D' -- because '-MD' means "put the output + # in D". + rm -rf conftest.dir + mkdir conftest.dir + # Copy depcomp to subdir because otherwise we won't find it if we're + # using a relative directory. + cp "$am_depcomp" conftest.dir + cd conftest.dir + # We will build objects and dependencies in a subdirectory because + # it helps to detect inapplicable dependency modes. For instance + # both Tru64's cc and ICC support -MD to output dependencies as a + # side effect of compilation, but ICC will put the dependencies in + # the current directory while Tru64 will put them in the object + # directory. 
+ mkdir sub + + am_cv_$1_dependencies_compiler_type=none + if test "$am_compiler_list" = ""; then + am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp` + fi + am__universal=false + m4_case([$1], [CC], + [case " $depcc " in #( + *\ -arch\ *\ -arch\ *) am__universal=true ;; + esac], + [CXX], + [case " $depcc " in #( + *\ -arch\ *\ -arch\ *) am__universal=true ;; + esac]) + + for depmode in $am_compiler_list; do + # Setup a source with many dependencies, because some compilers + # like to wrap large dependency lists on column 80 (with \), and + # we should not choose a depcomp mode which is confused by this. + # + # We need to recreate these files for each test, as the compiler may + # overwrite some of them when testing with obscure command lines. + # This happens at least with the AIX C compiler. + : > sub/conftest.c + for i in 1 2 3 4 5 6; do + echo '#include "conftst'$i'.h"' >> sub/conftest.c + # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with + # Solaris 10 /bin/sh. + echo '/* dummy */' > sub/conftst$i.h + done + echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf + + # We check with '-c' and '-o' for the sake of the "dashmstdout" + # mode. It turns out that the SunPro C++ compiler does not properly + # handle '-M -o', and we need to detect this. Also, some Intel + # versions had trouble with output in subdirs. + am__obj=sub/conftest.${OBJEXT-o} + am__minus_obj="-o $am__obj" + case $depmode in + gcc) + # This depmode causes a compiler race in universal mode. + test "$am__universal" = false || continue + ;; + nosideeffect) + # After this tag, mechanisms are not by side-effect, so they'll + # only be used when explicitly requested. + if test "x$enable_dependency_tracking" = xyes; then + continue + else + break + fi + ;; + msvc7 | msvc7msys | msvisualcpp | msvcmsys) + # This compiler won't grok '-c -o', but also, the minuso test has + # not run yet. 
These depmodes are late enough in the game, and + # so weak that their functioning should not be impacted. + am__obj=conftest.${OBJEXT-o} + am__minus_obj= + ;; + none) break ;; + esac + if depmode=$depmode \ + source=sub/conftest.c object=$am__obj \ + depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ + $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ + >/dev/null 2>conftest.err && + grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && + grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && + grep $am__obj sub/conftest.Po > /dev/null 2>&1 && + ${MAKE-make} -s -f confmf > /dev/null 2>&1; then + # icc doesn't choke on unknown options, it will just issue warnings + # or remarks (even with -Werror). So we grep stderr for any message + # that says an option was ignored or not supported. + # When given -MP, icc 7.0 and 7.1 complain thusly: + # icc: Command line warning: ignoring option '-M'; no argument required + # The diagnosis changed in icc 8.0: + # icc: Command line remark: option '-MP' not supported + if (grep 'ignoring option' conftest.err || + grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else + am_cv_$1_dependencies_compiler_type=$depmode + break + fi + fi + done + + cd .. + rm -rf conftest.dir +else + am_cv_$1_dependencies_compiler_type=none +fi +]) +AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type]) +AM_CONDITIONAL([am__fastdep$1], [ + test "x$enable_dependency_tracking" != xno \ + && test "$am_cv_$1_dependencies_compiler_type" = gcc3]) +]) + + +# AM_SET_DEPDIR +# ------------- +# Choose a directory name for dependency files. +# This macro is AC_REQUIREd in _AM_DEPENDENCIES. 
+AC_DEFUN([AM_SET_DEPDIR], +[AC_REQUIRE([AM_SET_LEADING_DOT])dnl +AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl +]) + + +# AM_DEP_TRACK +# ------------ +AC_DEFUN([AM_DEP_TRACK], +[AC_ARG_ENABLE([dependency-tracking], [dnl +AS_HELP_STRING( + [--enable-dependency-tracking], + [do not reject slow dependency extractors]) +AS_HELP_STRING( + [--disable-dependency-tracking], + [speeds up one-time build])]) +if test "x$enable_dependency_tracking" != xno; then + am_depcomp="$ac_aux_dir/depcomp" + AMDEPBACKSLASH='\' + am__nodep='_no' +fi +AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno]) +AC_SUBST([AMDEPBACKSLASH])dnl +_AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl +AC_SUBST([am__nodep])dnl +_AM_SUBST_NOTMAKE([am__nodep])dnl +]) + +# Generate code to set up dependency tracking. -*- Autoconf -*- + +# Copyright (C) 1999-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + + +# _AM_OUTPUT_DEPENDENCY_COMMANDS +# ------------------------------ +AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS], +[{ + # Older Autoconf quotes --file arguments for eval, but not when files + # are listed without --file. Let's play safe and only enable the eval + # if we detect the quoting. + case $CONFIG_FILES in + *\'*) eval set x "$CONFIG_FILES" ;; + *) set x $CONFIG_FILES ;; + esac + shift + for mf + do + # Strip MF so we end up with the name of the file. + mf=`echo "$mf" | sed -e 's/:.*$//'` + # Check whether this is an Automake generated Makefile or not. + # We used to match only the files named 'Makefile.in', but + # some people rename them; so instead we look at the file content. + # Grep'ing the first line is not enough: some people post-process + # each Makefile.in and add a new line on top of each file to say so. 
+ # Grep'ing the whole file is not good either: AIX grep has a line + # limit of 2048, but all sed's we know have understand at least 4000. + if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then + dirpart=`AS_DIRNAME("$mf")` + else + continue + fi + # Extract the definition of DEPDIR, am__include, and am__quote + # from the Makefile without running 'make'. + DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` + test -z "$DEPDIR" && continue + am__include=`sed -n 's/^am__include = //p' < "$mf"` + test -z "$am__include" && continue + am__quote=`sed -n 's/^am__quote = //p' < "$mf"` + # Find all dependency output files, they are included files with + # $(DEPDIR) in their names. We invoke sed twice because it is the + # simplest approach to changing $(DEPDIR) to its actual value in the + # expansion. + for file in `sed -n " + s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ + sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do + # Make sure the directory exists. + test -f "$dirpart/$file" && continue + fdir=`AS_DIRNAME(["$file"])` + AS_MKDIR_P([$dirpart/$fdir]) + # echo "creating $dirpart/$file" + echo '# dummy' > "$dirpart/$file" + done + done +} +])# _AM_OUTPUT_DEPENDENCY_COMMANDS + + +# AM_OUTPUT_DEPENDENCY_COMMANDS +# ----------------------------- +# This macro should only be invoked once -- use via AC_REQUIRE. +# +# This code is only required when automatic dependency tracking +# is enabled. FIXME. This creates each '.P' file that we will +# need in order to bootstrap the dependency handling code. +AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], +[AC_CONFIG_COMMANDS([depfiles], + [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS], + [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"]) +]) + +# Do all the work for Automake. -*- Autoconf -*- + +# Copyright (C) 1996-2014 Free Software Foundation, Inc. 
+# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This macro actually does too much. Some checks are only needed if +# your package does certain things. But this isn't really a big deal. + +dnl Redefine AC_PROG_CC to automatically invoke _AM_PROG_CC_C_O. +m4_define([AC_PROG_CC], +m4_defn([AC_PROG_CC]) +[_AM_PROG_CC_C_O +]) + +# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) +# AM_INIT_AUTOMAKE([OPTIONS]) +# ----------------------------------------------- +# The call with PACKAGE and VERSION arguments is the old style +# call (pre autoconf-2.50), which is being phased out. PACKAGE +# and VERSION should now be passed to AC_INIT and removed from +# the call to AM_INIT_AUTOMAKE. +# We support both call styles for the transition. After +# the next Automake release, Autoconf can make the AC_INIT +# arguments mandatory, and then we can depend on a new Autoconf +# release and drop the old call support. +AC_DEFUN([AM_INIT_AUTOMAKE], +[AC_PREREQ([2.65])dnl +dnl Autoconf wants to disallow AM_ names. We explicitly allow +dnl the ones we care about. +m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl +AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl +AC_REQUIRE([AC_PROG_INSTALL])dnl +if test "`cd $srcdir && pwd`" != "`pwd`"; then + # Use -I$(srcdir) only when $(srcdir) != ., so that make's output + # is not polluted with repeated "-I." + AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl + # test to see if srcdir already configured + if test -f $srcdir/config.status; then + AC_MSG_ERROR([source directory already configured; run "make distclean" there first]) + fi +fi + +# test whether we have cygpath +if test -z "$CYGPATH_W"; then + if (cygpath --version) >/dev/null 2>/dev/null; then + CYGPATH_W='cygpath -w' + else + CYGPATH_W=echo + fi +fi +AC_SUBST([CYGPATH_W]) + +# Define the identity of the package. 
+dnl Distinguish between old-style and new-style calls. +m4_ifval([$2], +[AC_DIAGNOSE([obsolete], + [$0: two- and three-arguments forms are deprecated.]) +m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl + AC_SUBST([PACKAGE], [$1])dnl + AC_SUBST([VERSION], [$2])], +[_AM_SET_OPTIONS([$1])dnl +dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT. +m4_if( + m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]), + [ok:ok],, + [m4_fatal([AC_INIT should be called with package and version arguments])])dnl + AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl + AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl + +_AM_IF_OPTION([no-define],, +[AC_DEFINE_UNQUOTED([PACKAGE], ["$PACKAGE"], [Name of package]) + AC_DEFINE_UNQUOTED([VERSION], ["$VERSION"], [Version number of package])])dnl + +# Some tools Automake needs. +AC_REQUIRE([AM_SANITY_CHECK])dnl +AC_REQUIRE([AC_ARG_PROGRAM])dnl +AM_MISSING_PROG([ACLOCAL], [aclocal-${am__api_version}]) +AM_MISSING_PROG([AUTOCONF], [autoconf]) +AM_MISSING_PROG([AUTOMAKE], [automake-${am__api_version}]) +AM_MISSING_PROG([AUTOHEADER], [autoheader]) +AM_MISSING_PROG([MAKEINFO], [makeinfo]) +AC_REQUIRE([AM_PROG_INSTALL_SH])dnl +AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl +AC_REQUIRE([AC_PROG_MKDIR_P])dnl +# For better backward compatibility. To be removed once Automake 1.9.x +# dies out for good. For more background, see: +# +# +AC_SUBST([mkdir_p], ['$(MKDIR_P)']) +# We need awk for the "check" target (and possibly the TAP driver). The +# system "awk" is bad on some platforms. 
+AC_REQUIRE([AC_PROG_AWK])dnl +AC_REQUIRE([AC_PROG_MAKE_SET])dnl +AC_REQUIRE([AM_SET_LEADING_DOT])dnl +_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], + [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], + [_AM_PROG_TAR([v7])])]) +_AM_IF_OPTION([no-dependencies],, +[AC_PROVIDE_IFELSE([AC_PROG_CC], + [_AM_DEPENDENCIES([CC])], + [m4_define([AC_PROG_CC], + m4_defn([AC_PROG_CC])[_AM_DEPENDENCIES([CC])])])dnl +AC_PROVIDE_IFELSE([AC_PROG_CXX], + [_AM_DEPENDENCIES([CXX])], + [m4_define([AC_PROG_CXX], + m4_defn([AC_PROG_CXX])[_AM_DEPENDENCIES([CXX])])])dnl +AC_PROVIDE_IFELSE([AC_PROG_OBJC], + [_AM_DEPENDENCIES([OBJC])], + [m4_define([AC_PROG_OBJC], + m4_defn([AC_PROG_OBJC])[_AM_DEPENDENCIES([OBJC])])])dnl +AC_PROVIDE_IFELSE([AC_PROG_OBJCXX], + [_AM_DEPENDENCIES([OBJCXX])], + [m4_define([AC_PROG_OBJCXX], + m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl +]) +AC_REQUIRE([AM_SILENT_RULES])dnl +dnl The testsuite driver may need to know about EXEEXT, so add the +dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This +dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below. +AC_CONFIG_COMMANDS_PRE(dnl +[m4_provide_if([_AM_COMPILER_EXEEXT], + [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl + +# POSIX will say in a future version that running "rm -f" with no argument +# is OK; and we want to be able to make that assumption in our Makefile +# recipes. So use an aggressive probe to check that the usage we want is +# actually supported "in the wild" to an acceptable degree. +# See automake bug#10828. +# To make any issue more visible, cause the running configure to be aborted +# by default if the 'rm' program in use doesn't match our expectations; the +# user can still override this though. +if rm -f && rm -fr && rm -rf; then : OK; else + cat >&2 <<'END' +Oops! + +Your 'rm' program seems unable to run without file operands specified +on the command line, even when the '-f' option is present. 
This is contrary +to the behaviour of most rm programs out there, and not conforming with +the upcoming POSIX standard: + +Please tell bug-automake@gnu.org about your system, including the value +of your $PATH and any error possibly output before this message. This +can help us improve future automake versions. + +END + if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then + echo 'Configuration will proceed anyway, since you have set the' >&2 + echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 + echo >&2 + else + cat >&2 <<'END' +Aborting the configuration process, to ensure you take notice of the issue. + +You can download and install GNU coreutils to get an 'rm' implementation +that behaves properly: . + +If you want to complete the configuration process using your problematic +'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM +to "yes", and re-run configure. + +END + AC_MSG_ERROR([Your 'rm' program is bad, sorry.]) + fi +fi +dnl The trailing newline in this macro's definition is deliberate, for +dnl backward compatibility and to allow trailing 'dnl'-style comments +dnl after the AM_INIT_AUTOMAKE invocation. See automake bug#16841. +]) + +dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion. Do not +dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further +dnl mangled by Autoconf and run in a shell conditional statement. +m4_define([_AC_COMPILER_EXEEXT], +m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) + +# When config.status generates a header, we must update the stamp-h file. +# This file resides in the same directory as the config header +# that is generated. The stamp files are numbered to have different names. + +# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the +# loop where config.status creates the headers, so we can generate +# our stamp files there. +AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], +[# Compute $1's index in $config_headers. 
+_am_arg=$1 +_am_stamp_count=1 +for _am_header in $config_headers :; do + case $_am_header in + $_am_arg | $_am_arg:* ) + break ;; + * ) + _am_stamp_count=`expr $_am_stamp_count + 1` ;; + esac +done +echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) + +# Copyright (C) 2001-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_PROG_INSTALL_SH +# ------------------ +# Define $install_sh. +AC_DEFUN([AM_PROG_INSTALL_SH], +[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl +if test x"${install_sh+set}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; + *) + install_sh="\${SHELL} $am_aux_dir/install-sh" + esac +fi +AC_SUBST([install_sh])]) + +# Copyright (C) 2003-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# Check whether the underlying file-system supports filenames +# with a leading dot. For instance MS-DOS doesn't. +AC_DEFUN([AM_SET_LEADING_DOT], +[rm -rf .tst 2>/dev/null +mkdir .tst 2>/dev/null +if test -d .tst; then + am__leading_dot=. +else + am__leading_dot=_ +fi +rmdir .tst 2>/dev/null +AC_SUBST([am__leading_dot])]) + +# Add --enable-maintainer-mode option to configure. -*- Autoconf -*- +# From Jim Meyering + +# Copyright (C) 1996-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_MAINTAINER_MODE([DEFAULT-MODE]) +# ---------------------------------- +# Control maintainer-specific portions of Makefiles. 
+# Default is to disable them, unless 'enable' is passed literally. +# For symmetry, 'disable' may be passed as well. Anyway, the user +# can override the default with the --enable/--disable switch. +AC_DEFUN([AM_MAINTAINER_MODE], +[m4_case(m4_default([$1], [disable]), + [enable], [m4_define([am_maintainer_other], [disable])], + [disable], [m4_define([am_maintainer_other], [enable])], + [m4_define([am_maintainer_other], [enable]) + m4_warn([syntax], [unexpected argument to AM@&t@_MAINTAINER_MODE: $1])]) +AC_MSG_CHECKING([whether to enable maintainer-specific portions of Makefiles]) + dnl maintainer-mode's default is 'disable' unless 'enable' is passed + AC_ARG_ENABLE([maintainer-mode], + [AS_HELP_STRING([--]am_maintainer_other[-maintainer-mode], + am_maintainer_other[ make rules and dependencies not useful + (and sometimes confusing) to the casual installer])], + [USE_MAINTAINER_MODE=$enableval], + [USE_MAINTAINER_MODE=]m4_if(am_maintainer_other, [enable], [no], [yes])) + AC_MSG_RESULT([$USE_MAINTAINER_MODE]) + AM_CONDITIONAL([MAINTAINER_MODE], [test $USE_MAINTAINER_MODE = yes]) + MAINT=$MAINTAINER_MODE_TRUE + AC_SUBST([MAINT])dnl +] +) + +# Check to see how 'make' treats includes. -*- Autoconf -*- + +# Copyright (C) 2001-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_MAKE_INCLUDE() +# ----------------- +# Check to see how make treats includes. +AC_DEFUN([AM_MAKE_INCLUDE], +[am_make=${MAKE-make} +cat > confinc << 'END' +am__doit: + @echo this is the am__doit target +.PHONY: am__doit +END +# If we don't find an include directive, just comment out the code. +AC_MSG_CHECKING([for style of include used by $am_make]) +am__include="#" +am__quote= +_am_result=none +# First try GNU make style include. 
+echo "include confinc" > confmf +# Ignore all kinds of additional output from 'make'. +case `$am_make -s -f confmf 2> /dev/null` in #( +*the\ am__doit\ target*) + am__include=include + am__quote= + _am_result=GNU + ;; +esac +# Now try BSD make style include. +if test "$am__include" = "#"; then + echo '.include "confinc"' > confmf + case `$am_make -s -f confmf 2> /dev/null` in #( + *the\ am__doit\ target*) + am__include=.include + am__quote="\"" + _am_result=BSD + ;; + esac +fi +AC_SUBST([am__include]) +AC_SUBST([am__quote]) +AC_MSG_RESULT([$_am_result]) +rm -f confinc confmf +]) + +# Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- + +# Copyright (C) 1997-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_MISSING_PROG(NAME, PROGRAM) +# ------------------------------ +AC_DEFUN([AM_MISSING_PROG], +[AC_REQUIRE([AM_MISSING_HAS_RUN]) +$1=${$1-"${am_missing_run}$2"} +AC_SUBST($1)]) + +# AM_MISSING_HAS_RUN +# ------------------ +# Define MISSING if not defined so far and test if it is modern enough. +# If it is, set am_missing_run to use it, otherwise, to nothing. +AC_DEFUN([AM_MISSING_HAS_RUN], +[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl +AC_REQUIRE_AUX_FILE([missing])dnl +if test x"${MISSING+set}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; + *) + MISSING="\${SHELL} $am_aux_dir/missing" ;; + esac +fi +# Use eval to expand $SHELL +if eval "$MISSING --is-lightweight"; then + am_missing_run="$MISSING " +else + am_missing_run= + AC_MSG_WARN(['missing' script is too old or missing]) +fi +]) + +# Helper functions for option handling. -*- Autoconf -*- + +# Copyright (C) 2001-2014 Free Software Foundation, Inc. 
+# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# _AM_MANGLE_OPTION(NAME) +# ----------------------- +AC_DEFUN([_AM_MANGLE_OPTION], +[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) + +# _AM_SET_OPTION(NAME) +# -------------------- +# Set option NAME. Presently that only means defining a flag for this option. +AC_DEFUN([_AM_SET_OPTION], +[m4_define(_AM_MANGLE_OPTION([$1]), [1])]) + +# _AM_SET_OPTIONS(OPTIONS) +# ------------------------ +# OPTIONS is a space-separated list of Automake options. +AC_DEFUN([_AM_SET_OPTIONS], +[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) + +# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) +# ------------------------------------------- +# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. +AC_DEFUN([_AM_IF_OPTION], +[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) + +# Copyright (C) 1999-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# _AM_PROG_CC_C_O +# --------------- +# Like AC_PROG_CC_C_O, but changed for automake. We rewrite AC_PROG_CC +# to automatically call this. +AC_DEFUN([_AM_PROG_CC_C_O], +[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl +AC_REQUIRE_AUX_FILE([compile])dnl +AC_LANG_PUSH([C])dnl +AC_CACHE_CHECK( + [whether $CC understands -c and -o together], + [am_cv_prog_cc_c_o], + [AC_LANG_CONFTEST([AC_LANG_PROGRAM([])]) + # Make sure it works both with $CC and with simple cc. + # Following AC_PROG_CC_C_O, we do the test twice because some + # compilers refuse to overwrite an existing .o file with -o, + # though they will create one. 
+ am_cv_prog_cc_c_o=yes + for am_i in 1 2; do + if AM_RUN_LOG([$CC -c conftest.$ac_ext -o conftest2.$ac_objext]) \ + && test -f conftest2.$ac_objext; then + : OK + else + am_cv_prog_cc_c_o=no + break + fi + done + rm -f core conftest* + unset am_i]) +if test "$am_cv_prog_cc_c_o" != yes; then + # Losing compiler, so override with the script. + # FIXME: It is wrong to rewrite CC. + # But if we don't then we get into trouble of one sort or another. + # A longer-term fix would be to have automake use am__CC in this case, + # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" + CC="$am_aux_dir/compile $CC" +fi +AC_LANG_POP([C])]) + +# For backward compatibility. +AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])]) + +# Copyright (C) 2001-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_RUN_LOG(COMMAND) +# ------------------- +# Run COMMAND, save the exit status in ac_status, and log it. +# (This has been adapted from Autoconf's _AC_RUN_LOG macro.) +AC_DEFUN([AM_RUN_LOG], +[{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD + ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD + ac_status=$? + echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD + (exit $ac_status); }]) + +# Check to make sure that the build environment is sane. -*- Autoconf -*- + +# Copyright (C) 1996-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_SANITY_CHECK +# --------------- +AC_DEFUN([AM_SANITY_CHECK], +[AC_MSG_CHECKING([whether build environment is sane]) +# Reject unsafe characters in $srcdir or the absolute working directory +# name. Accept space and tab only in the latter. 
+am_lf=' +' +case `pwd` in + *[[\\\"\#\$\&\'\`$am_lf]]*) + AC_MSG_ERROR([unsafe absolute working directory name]);; +esac +case $srcdir in + *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) + AC_MSG_ERROR([unsafe srcdir value: '$srcdir']);; +esac + +# Do 'set' in a subshell so we don't clobber the current shell's +# arguments. Must try -L first in case configure is actually a +# symlink; some systems play weird games with the mod time of symlinks +# (eg FreeBSD returns the mod time of the symlink's containing +# directory). +if ( + am_has_slept=no + for am_try in 1 2; do + echo "timestamp, slept: $am_has_slept" > conftest.file + set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` + if test "$[*]" = "X"; then + # -L didn't work. + set X `ls -t "$srcdir/configure" conftest.file` + fi + if test "$[*]" != "X $srcdir/configure conftest.file" \ + && test "$[*]" != "X conftest.file $srcdir/configure"; then + + # If neither matched, then we have a broken ls. This can happen + # if, for instance, CONFIG_SHELL is bash and it inherits a + # broken ls alias from the environment. This has actually + # happened. Such a system could not be considered "sane". + AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken + alias in your environment]) + fi + if test "$[2]" = conftest.file || test $am_try -eq 2; then + break + fi + # Just in case. + sleep 1 + am_has_slept=yes + done + test "$[2]" = conftest.file + ) +then + # Ok. + : +else + AC_MSG_ERROR([newly created file is older than distributed files! +Check your system clock]) +fi +AC_MSG_RESULT([yes]) +# If we didn't sleep, we still need to ensure time stamps of config.status and +# generated files are strictly newer. +am_sleep_pid= +if grep 'slept: no' conftest.file >/dev/null 2>&1; then + ( sleep 1 ) & + am_sleep_pid=$! +fi +AC_CONFIG_COMMANDS_PRE( + [AC_MSG_CHECKING([that generated files are newer than configure]) + if test -n "$am_sleep_pid"; then + # Hide warnings about reused PIDs. 
+ wait $am_sleep_pid 2>/dev/null + fi + AC_MSG_RESULT([done])]) +rm -f conftest.file +]) + +# Copyright (C) 2009-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_SILENT_RULES([DEFAULT]) +# -------------------------- +# Enable less verbose build rules; with the default set to DEFAULT +# ("yes" being less verbose, "no" or empty being verbose). +AC_DEFUN([AM_SILENT_RULES], +[AC_ARG_ENABLE([silent-rules], [dnl +AS_HELP_STRING( + [--enable-silent-rules], + [less verbose build output (undo: "make V=1")]) +AS_HELP_STRING( + [--disable-silent-rules], + [verbose build output (undo: "make V=0")])dnl +]) +case $enable_silent_rules in @%:@ ((( + yes) AM_DEFAULT_VERBOSITY=0;; + no) AM_DEFAULT_VERBOSITY=1;; + *) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);; +esac +dnl +dnl A few 'make' implementations (e.g., NonStop OS and NextStep) +dnl do not support nested variable expansions. +dnl See automake bug#9928 and bug#10237. +am_make=${MAKE-make} +AC_CACHE_CHECK([whether $am_make supports nested variables], + [am_cv_make_support_nested_variables], + [if AS_ECHO([['TRUE=$(BAR$(V)) +BAR0=false +BAR1=true +V=1 +am__doit: + @$(TRUE) +.PHONY: am__doit']]) | $am_make -f - >/dev/null 2>&1; then + am_cv_make_support_nested_variables=yes +else + am_cv_make_support_nested_variables=no +fi]) +if test $am_cv_make_support_nested_variables = yes; then + dnl Using '$V' instead of '$(V)' breaks IRIX make. 
+ AM_V='$(V)' + AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' +else + AM_V=$AM_DEFAULT_VERBOSITY + AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY +fi +AC_SUBST([AM_V])dnl +AM_SUBST_NOTMAKE([AM_V])dnl +AC_SUBST([AM_DEFAULT_V])dnl +AM_SUBST_NOTMAKE([AM_DEFAULT_V])dnl +AC_SUBST([AM_DEFAULT_VERBOSITY])dnl +AM_BACKSLASH='\' +AC_SUBST([AM_BACKSLASH])dnl +_AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl +]) + +# Copyright (C) 2001-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# AM_PROG_INSTALL_STRIP +# --------------------- +# One issue with vendor 'install' (even GNU) is that you can't +# specify the program used to strip binaries. This is especially +# annoying in cross-compiling environments, where the build's strip +# is unlikely to handle the host's binaries. +# Fortunately install-sh will honor a STRIPPROG variable, so we +# always use install-sh in "make install-strip", and initialize +# STRIPPROG with the value of the STRIP variable (set by the user). +AC_DEFUN([AM_PROG_INSTALL_STRIP], +[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl +# Installed binaries are usually stripped using 'strip' when the user +# run "make install-strip". However 'strip' might not be the right +# tool to use in cross-compilation environments, therefore Automake +# will honor the 'STRIP' environment variable to overrule this program. +dnl Don't test for $cross_compiling = yes, because it might be 'maybe'. +if test "$cross_compiling" != no; then + AC_CHECK_TOOL([STRIP], [strip], :) +fi +INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" +AC_SUBST([INSTALL_STRIP_PROGRAM])]) + +# Copyright (C) 2006-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. 
+ +# _AM_SUBST_NOTMAKE(VARIABLE) +# --------------------------- +# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. +# This macro is traced by Automake. +AC_DEFUN([_AM_SUBST_NOTMAKE]) + +# AM_SUBST_NOTMAKE(VARIABLE) +# -------------------------- +# Public sister of _AM_SUBST_NOTMAKE. +AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)]) + +# Check how to create a tarball. -*- Autoconf -*- + +# Copyright (C) 2004-2014 Free Software Foundation, Inc. +# +# This file is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# _AM_PROG_TAR(FORMAT) +# -------------------- +# Check how to create a tarball in format FORMAT. +# FORMAT should be one of 'v7', 'ustar', or 'pax'. +# +# Substitute a variable $(am__tar) that is a command +# writing to stdout a FORMAT-tarball containing the directory +# $tardir. +# tardir=directory && $(am__tar) > result.tar +# +# Substitute a variable $(am__untar) that extract such +# a tarball read from stdin. +# $(am__untar) < result.tar +# +AC_DEFUN([_AM_PROG_TAR], +[# Always define AMTAR for backward compatibility. Yes, it's still used +# in the wild :-( We should find a proper way to deprecate it ... +AC_SUBST([AMTAR], ['$${TAR-tar}']) + +# We'll loop over all known methods to create a tar archive until one works. +_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' + +m4_if([$1], [v7], + [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], + + [m4_case([$1], + [ustar], + [# The POSIX 1988 'ustar' format is defined with fixed-size fields. + # There is notably a 21 bits limit for the UID and the GID. In fact, + # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343 + # and bug#13588). + am_max_uid=2097151 # 2^21 - 1 + am_max_gid=$am_max_uid + # The $UID and $GID variables are not portable, so we need to resort + # to the POSIX-mandated id(1) utility. 
Errors in the 'id' calls + # below are definitely unexpected, so allow the users to see them + # (that is, avoid stderr redirection). + am_uid=`id -u || echo unknown` + am_gid=`id -g || echo unknown` + AC_MSG_CHECKING([whether UID '$am_uid' is supported by ustar format]) + if test $am_uid -le $am_max_uid; then + AC_MSG_RESULT([yes]) + else + AC_MSG_RESULT([no]) + _am_tools=none + fi + AC_MSG_CHECKING([whether GID '$am_gid' is supported by ustar format]) + if test $am_gid -le $am_max_gid; then + AC_MSG_RESULT([yes]) + else + AC_MSG_RESULT([no]) + _am_tools=none + fi], + + [pax], + [], + + [m4_fatal([Unknown tar format])]) + + AC_MSG_CHECKING([how to create a $1 tar archive]) + + # Go ahead even if we have the value already cached. We do so because we + # need to set the values for the 'am__tar' and 'am__untar' variables. + _am_tools=${am_cv_prog_tar_$1-$_am_tools} + + for _am_tool in $_am_tools; do + case $_am_tool in + gnutar) + for _am_tar in tar gnutar gtar; do + AM_RUN_LOG([$_am_tar --version]) && break + done + am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' + am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' + am__untar="$_am_tar -xf -" + ;; + plaintar) + # Must skip GNU tar: if it does not support --format= it doesn't create + # ustar tarball either. + (tar --version) >/dev/null 2>&1 && continue + am__tar='tar chf - "$$tardir"' + am__tar_='tar chf - "$tardir"' + am__untar='tar xf -' + ;; + pax) + am__tar='pax -L -x $1 -w "$$tardir"' + am__tar_='pax -L -x $1 -w "$tardir"' + am__untar='pax -r' + ;; + cpio) + am__tar='find "$$tardir" -print | cpio -o -H $1 -L' + am__tar_='find "$tardir" -print | cpio -o -H $1 -L' + am__untar='cpio -i -H $1 -d' + ;; + none) + am__tar=false + am__tar_=false + am__untar=false + ;; + esac + + # If the value was cached, stop now. We just wanted to have am__tar + # and am__untar set. 
+    test -n "${am_cv_prog_tar_$1}" && break
+
+    # tar/untar a dummy directory, and stop if the command works.
+    rm -rf conftest.dir
+    mkdir conftest.dir
+    echo GrepMe > conftest.dir/file
+    AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar])
+    rm -rf conftest.dir
+    if test -s conftest.tar; then
+      AM_RUN_LOG([$am__untar <conftest.tar])
+      AM_RUN_LOG([cat conftest.dir/file])
+      grep GrepMe conftest.dir/file >/dev/null 2>&1 && break
+    fi
+  done
+  rm -rf conftest.dir
+
+  AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool])
+  AC_MSG_RESULT([$am_cv_prog_tar_$1])])
+
+AC_SUBST([am__tar])
+AC_SUBST([am__untar])
+]) # _AM_PROG_TAR
+
+m4_include([m4/autobuild.m4])
+m4_include([m4/lib-ld.m4])
+m4_include([m4/lib-link.m4])
+m4_include([m4/lib-prefix.m4])
+m4_include([m4/libtool.m4])
+m4_include([m4/ltoptions.m4])
+m4_include([m4/ltsugar.m4])
+m4_include([m4/ltversion.m4])
+m4_include([m4/lt~obsolete.m4])
+m4_include([acinclude.m4])
diff --git a/vendor/libssh2/buildconf b/vendor/libssh2/buildconf
new file mode 100755
index 000000000..558dcb660
--- /dev/null
+++ b/vendor/libssh2/buildconf
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+LIBTOOLIZE="libtoolize"
+
+if [ "x`which $LIBTOOLIZE`" = "x" ]; then
+  LIBTOOLIZE="glibtoolize"
+fi
+
+if [ "x`which $LIBTOOLIZE`" = "x" ]; then
+  echo "Neither libtoolize nor glibtoolize could be found!"
+ exit 1 +fi + +${LIBTOOLIZE} --copy --automake --force +${ACLOCAL:-aclocal} -I m4 $ACLOCAL_FLAGS +${AUTOHEADER:-autoheader} +# copy the private libssh2_config.h.in to the examples dir so that +# it can be included without pointing the include path to the private +# source dir +cp src/libssh2_config.h.in example/libssh2_config.h.in +${AUTOCONF:-autoconf} +${AUTOMAKE:-automake} --add-missing --copy diff --git a/vendor/libssh2/cmake/CheckFunctionExistsMayNeedLibrary.cmake b/vendor/libssh2/cmake/CheckFunctionExistsMayNeedLibrary.cmake new file mode 100644 index 000000000..8ac61abe5 --- /dev/null +++ b/vendor/libssh2/cmake/CheckFunctionExistsMayNeedLibrary.cmake @@ -0,0 +1,81 @@ +# Copyright (c) 2014 Alexander Lamaison +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided +# that the following conditions are met: +# +# Redistributions of source code must retain the above +# copyright notice, this list of conditions and the +# following disclaimer. +# +# Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials +# provided with the distribution. +# +# Neither the name of the copyright holder nor the names +# of any other contributors may be used to endorse or +# promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY +# OF SUCH DAMAGE. + + +# - check_function_exists_maybe_need_library( [lib1 ... libn]) +# +# Check if function is available for linking, first without extra libraries, and +# then, if not found that way, linking in each optional library as well. This +# function is similar to autotools AC_SEARCH_LIBS. +# +# If the function if found, this will define . +# +# If the function was only found by linking in an additional library, this +# will define NEED_LIB_LIBX, where LIBX is the one of lib1 to libn that +# makes the function available, in uppercase. 
+# +# The following variables may be set before calling this macro to +# modify the way the check is run: +# +# CMAKE_REQUIRED_FLAGS = string of compile command line flags +# CMAKE_REQUIRED_DEFINITIONS = list of macros to define (-DFOO=bar) +# CMAKE_REQUIRED_INCLUDES = list of include directories +# CMAKE_REQUIRED_LIBRARIES = list of libraries to link +# + +include(CheckFunctionExists) +include(CheckLibraryExists) + +function(check_function_exists_may_need_library function variable) + + check_function_exists(${function} ${variable}) + + if(NOT ${variable}) + foreach(lib ${ARGN}) + string(TOUPPER ${lib} UP_LIB) + # Use new variable to prevent cache from previous step shortcircuiting + # new test + check_library_exists(${lib} ${function} "" HAVE_${function}_IN_${lib}) + if(HAVE_${function}_IN_${lib}) + set(${variable} 1 CACHE INTERNAL + "Function ${function} found in library ${lib}") + set(NEED_LIB_${UP_LIB} 1 CACHE INTERNAL + "Need to link ${lib}") + break() + endif() + endforeach() + endif() + +endfunction() \ No newline at end of file diff --git a/vendor/libssh2/cmake/CheckNonblockingSocketSupport.cmake b/vendor/libssh2/cmake/CheckNonblockingSocketSupport.cmake new file mode 100644 index 000000000..74f4776a6 --- /dev/null +++ b/vendor/libssh2/cmake/CheckNonblockingSocketSupport.cmake @@ -0,0 +1,119 @@ +include(CheckCSourceCompiles) + +# - check_nonblocking_socket_support() +# +# Check for how to set a socket to non-blocking state. There seems to exist +# four known different ways, with the one used almost everywhere being POSIX +# and XPG3, while the other different ways for different systems (old BSD, +# Windows and Amiga). 
+# +# One of the following variables will be set indicating the supported +# method (if any): +# HAVE_O_NONBLOCK +# HAVE_FIONBIO +# HAVE_IOCTLSOCKET +# HAVE_IOCTLSOCKET_CASE +# HAVE_SO_NONBLOCK +# HAVE_DISABLED_NONBLOCKING +# +# The following variables may be set before calling this macro to +# modify the way the check is run: +# +# CMAKE_REQUIRED_FLAGS = string of compile command line flags +# CMAKE_REQUIRED_DEFINITIONS = list of macros to define (-DFOO=bar) +# CMAKE_REQUIRED_INCLUDES = list of include directories +# CMAKE_REQUIRED_LIBRARIES = list of libraries to link +# +macro(check_nonblocking_socket_support) + # There are two known platforms (AIX 3.x and SunOS 4.1.x) where the + # O_NONBLOCK define is found but does not work. + check_c_source_compiles(" +#include +#include +#include + +#if defined(sun) || defined(__sun__) || defined(__SUNPRO_C) || defined(__SUNPRO_CC) +# if defined(__SVR4) || defined(__srv4__) +# define PLATFORM_SOLARIS +# else +# define PLATFORM_SUNOS4 +# endif +#endif +#if (defined(_AIX) || defined(__xlC__)) && !defined(_AIX41) +# define PLATFORM_AIX_V3 +#endif + +#if defined(PLATFORM_SUNOS4) || defined(PLATFORM_AIX_V3) || defined(__BEOS__) +#error \"O_NONBLOCK does not work on this platform\" +#endif + +int main() +{ + int socket; + int flags = fcntl(socket, F_SETFL, flags | O_NONBLOCK); +}" + HAVE_O_NONBLOCK) + + if(NOT HAVE_O_NONBLOCK) + check_c_source_compiles("/* FIONBIO test (old-style unix) */ +#include +#include + +int main() +{ + int socket; + int flags = ioctl(socket, FIONBIO, &flags); +}" + HAVE_FIONBIO) + + if(NOT HAVE_FIONBIO) + check_c_source_compiles("/* ioctlsocket test (Windows) */ +#undef inline +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN +#endif + +#include +#include + +int main() +{ + SOCKET sd; + unsigned long flags = 0; + sd = socket(0, 0, 0); + ioctlsocket(sd, FIONBIO, &flags); +}" + HAVE_IOCTLSOCKET) + + if(NOT HAVE_IOCTLSOCKET) + check_c_source_compiles("/* IoctlSocket test (Amiga?) 
*/ +#include + +int main() +{ + int socket; + int flags = IoctlSocket(socket, FIONBIO, (long)1); +}" + HAVE_IOCTLSOCKET_CASE) + + if(NOT HAVE_IOCTLSOCKET_CASE) + check_c_source_compiles("/* SO_NONBLOCK test (BeOS) */ +#include + +int main() +{ + long b = 1; + int socket; + int flags = setsockopt(socket, SOL_SOCKET, SO_NONBLOCK, &b, sizeof(b)); +}" + HAVE_SO_NONBLOCK) + + if(NOT HAVE_SO_NONBLOCK) + # No non-blocking socket method found + set(HAVE_DISABLED_NONBLOCKING 1) + endif() + endif() + endif() + endif() + endif() +endmacro() \ No newline at end of file diff --git a/vendor/libssh2/cmake/CopyRuntimeDependencies.cmake b/vendor/libssh2/cmake/CopyRuntimeDependencies.cmake new file mode 100644 index 000000000..083f76268 --- /dev/null +++ b/vendor/libssh2/cmake/CopyRuntimeDependencies.cmake @@ -0,0 +1,72 @@ +# Copyright (c) 2014 Alexander Lamaison +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided +# that the following conditions are met: +# +# Redistributions of source code must retain the above +# copyright notice, this list of conditions and the +# following disclaimer. +# +# Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials +# provided with the distribution. +# +# Neither the name of the copyright holder nor the names +# of any other contributors may be used to endorse or +# promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY +# OF SUCH DAMAGE. + +include(CMakeParseArguments) + +function(ADD_TARGET_TO_COPY_DEPENDENCIES) + set(options) + set(oneValueArgs TARGET) + set(multiValueArgs DEPENDENCIES BEFORE_TARGETS) + cmake_parse_arguments(COPY + "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if(NOT COPY_DEPENDENCIES) + return() + endif() + + # Using a custom target to drive custom commands stops multiple + # parallel builds trying to kick off the commands at the same time + add_custom_target(${COPY_TARGET}) + + foreach(target ${COPY_BEFORE_TARGETS}) + add_dependencies(${target} ${COPY_TARGET}) + endforeach() + + foreach(dependency ${COPY_DEPENDENCIES}) + + add_custom_command( + TARGET ${COPY_TARGET} + DEPENDS ${dependency} + # Make directory first otherwise file is copied in place of + # directory instead of into it + COMMAND ${CMAKE_COMMAND} + ARGS -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR} + COMMAND ${CMAKE_COMMAND} + ARGS -E copy ${dependency} ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR} + VERBATIM) + + endforeach() + +endfunction() diff --git a/vendor/libssh2/cmake/FindLibgcrypt.cmake b/vendor/libssh2/cmake/FindLibgcrypt.cmake new file mode 100644 index 000000000..44a79873d --- /dev/null +++ b/vendor/libssh2/cmake/FindLibgcrypt.cmake @@ -0,0 +1,53 @@ +# Copyright (c) 2014 Alexander Lamaison +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided +# that the following conditions 
are met: +# +# Redistributions of source code must retain the above +# copyright notice, this list of conditions and the +# following disclaimer. +# +# Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials +# provided with the distribution. +# +# Neither the name of the copyright holder nor the names +# of any other contributors may be used to endorse or +# promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY +# OF SUCH DAMAGE. 
+ +# - Try to find Libgcrypt +# This will define all or none of: +# LIBGCRYPT_FOUND - if Libgcrypt headers and library was found +# LIBGCRYPT_INCLUDE_DIRS - The Libgcrypt include directories +# LIBGCRYPT_LIBRARIES - The libraries needed to use Libgcrypt + +find_path(LIBGCRYPT_INCLUDE_DIR gcrypt.h) + +find_library(LIBGCRYPT_LIBRARY NAMES gcrypt libgcrypt) + +set(LIBGCRYPT_LIBRARIES ${LIBGCRYPT_LIBRARY}) +set(LIBGCRYPT_INCLUDE_DIRS ${LIBGCRYPT_INCLUDE_DIR}) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(Libgcrypt DEFAULT_MSG + LIBGCRYPT_LIBRARY LIBGCRYPT_INCLUDE_DIR) + +mark_as_advanced(LIBGCRYPT_INCLUDE_DIR LIBGCRYPT_LIBRARY) \ No newline at end of file diff --git a/vendor/libssh2/cmake/SocketLibraries.cmake b/vendor/libssh2/cmake/SocketLibraries.cmake new file mode 100644 index 000000000..bfbbd711e --- /dev/null +++ b/vendor/libssh2/cmake/SocketLibraries.cmake @@ -0,0 +1,64 @@ +# Copyright (c) 2014 Alexander Lamaison +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided +# that the following conditions are met: +# +# Redistributions of source code must retain the above +# copyright notice, this list of conditions and the +# following disclaimer. +# +# Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials +# provided with the distribution. +# +# Neither the name of the copyright holder nor the names +# of any other contributors may be used to endorse or +# promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY +# OF SUCH DAMAGE. + +# Some systems have their socket functions in a library. +# (Solaris -lsocket/-lnsl, Windows -lws2_32). This macro appends those +# libraries to the given list +macro(append_needed_socket_libraries LIBRARIES_LIST) + if(CMAKE_SYSTEM_NAME STREQUAL "Windows" AND CMAKE_SIZEOF_VOID_P EQUAL 4) + # x86 Windows uses STDCALL for these functions, so their names are mangled, + # meaning the platform checks don't work. Hardcoding these until we get + # a better solution. 
+ set(HAVE_SOCKET 1) + set(HAVE_SELECT 1) + set(HAVE_INET_ADDR 1) + set(NEED_LIB_WS2_32 1) + else() + check_function_exists_may_need_library(socket HAVE_SOCKET socket ws2_32) + check_function_exists_may_need_library(select HAVE_SELECT ws2_32) + check_function_exists_may_need_library(inet_addr HAVE_INET_ADDR nsl ws2_32) + endif() + + if(NEED_LIB_SOCKET) + list(APPEND ${LIBRARIES_LIST} socket) + endif() + if(NEED_LIB_NSL) + list(APPEND ${LIBRARIES_LIST} nsl) + endif() + if(NEED_LIB_WS2_32) + list(APPEND ${LIBRARIES_LIST} ws2_32) + endif() + +endmacro() \ No newline at end of file diff --git a/vendor/libssh2/cmake/Toolchain-Linux-32.cmake b/vendor/libssh2/cmake/Toolchain-Linux-32.cmake new file mode 100644 index 000000000..6aad9b1e2 --- /dev/null +++ b/vendor/libssh2/cmake/Toolchain-Linux-32.cmake @@ -0,0 +1,42 @@ +# Copyright (c) 2014 Alexander Lamaison +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided +# that the following conditions are met: +# +# Redistributions of source code must retain the above +# copyright notice, this list of conditions and the +# following disclaimer. +# +# Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials +# provided with the distribution. +# +# Neither the name of the copyright holder nor the names +# of any other contributors may be used to endorse or +# promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY +# OF SUCH DAMAGE. + +# Cross-compile 32-bit binary on 64-bit linux host +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_VERSION 1) +set(CMAKE_SYSTEM_PROCESSOR "i386") + +set(CMAKE_CXX_COMPILER_ARG1 "-m32") +set(CMAKE_C_COMPILER_ARG1 "-m32") \ No newline at end of file diff --git a/vendor/libssh2/cmake/max_warnings.cmake b/vendor/libssh2/cmake/max_warnings.cmake new file mode 100644 index 000000000..b176d302d --- /dev/null +++ b/vendor/libssh2/cmake/max_warnings.cmake @@ -0,0 +1,23 @@ +if(MSVC) + # Use the highest warning level for visual studio. + if(CMAKE_CXX_FLAGS MATCHES "/W[0-4]") + string(REGEX REPLACE "/W[0-4]" "/W4" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") + else() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4") + endif() + if(CMAKE_C_FLAGS MATCHES "/W[0-4]") + string(REGEX REPLACE "/W[0-4]" "/W4" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") + else() + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /W4") + endif() + + # Disable broken warnings + add_definitions(-D_CRT_SECURE_NO_WARNINGS -D_CRT_NONSTDC_NO_DEPRECATE) +elseif(CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX) + if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall") + endif() + if(NOT CMAKE_C_FLAGS MATCHES "-Wall") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall") + endif() +endif() diff --git a/vendor/libssh2/compile b/vendor/libssh2/compile new file mode 100755 index 000000000..531136b06 --- /dev/null +++ b/vendor/libssh2/compile @@ -0,0 +1,347 @@ +#! 
/bin/sh +# Wrapper for compilers which do not understand '-c -o'. + +scriptversion=2012-10-14.11; # UTC + +# Copyright (C) 1999-2013 Free Software Foundation, Inc. +# Written by Tom Tromey . +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2, or (at your option) +# any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that program. + +# This file is maintained in Automake, please report +# bugs to or send patches to +# . + +nl=' +' + +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent tools from complaining about whitespace usage. +IFS=" "" $nl" + +file_conv= + +# func_file_conv build_file lazy +# Convert a $build file to $host form and store it in $file +# Currently only supports Windows hosts. If the determined conversion +# type is listed in (the comma separated) LAZY, no conversion will +# take place. 
+func_file_conv () +{ + file=$1 + case $file in + / | /[!/]*) # absolute file, and not a UNC file + if test -z "$file_conv"; then + # lazily determine how to convert abs files + case `uname -s` in + MINGW*) + file_conv=mingw + ;; + CYGWIN*) + file_conv=cygwin + ;; + *) + file_conv=wine + ;; + esac + fi + case $file_conv/,$2, in + *,$file_conv,*) + ;; + mingw/*) + file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'` + ;; + cygwin/*) + file=`cygpath -m "$file" || echo "$file"` + ;; + wine/*) + file=`winepath -w "$file" || echo "$file"` + ;; + esac + ;; + esac +} + +# func_cl_dashL linkdir +# Make cl look for libraries in LINKDIR +func_cl_dashL () +{ + func_file_conv "$1" + if test -z "$lib_path"; then + lib_path=$file + else + lib_path="$lib_path;$file" + fi + linker_opts="$linker_opts -LIBPATH:$file" +} + +# func_cl_dashl library +# Do a library search-path lookup for cl +func_cl_dashl () +{ + lib=$1 + found=no + save_IFS=$IFS + IFS=';' + for dir in $lib_path $LIB + do + IFS=$save_IFS + if $shared && test -f "$dir/$lib.dll.lib"; then + found=yes + lib=$dir/$lib.dll.lib + break + fi + if test -f "$dir/$lib.lib"; then + found=yes + lib=$dir/$lib.lib + break + fi + if test -f "$dir/lib$lib.a"; then + found=yes + lib=$dir/lib$lib.a + break + fi + done + IFS=$save_IFS + + if test "$found" != yes; then + lib=$lib.lib + fi +} + +# func_cl_wrapper cl arg... +# Adjust compile command to suit cl +func_cl_wrapper () +{ + # Assume a capable shell + lib_path= + shared=: + linker_opts= + for arg + do + if test -n "$eat"; then + eat= + else + case $1 in + -o) + # configure might choose to run compile as 'compile cc -o foo foo.c'. 
+ eat=1 + case $2 in + *.o | *.[oO][bB][jJ]) + func_file_conv "$2" + set x "$@" -Fo"$file" + shift + ;; + *) + func_file_conv "$2" + set x "$@" -Fe"$file" + shift + ;; + esac + ;; + -I) + eat=1 + func_file_conv "$2" mingw + set x "$@" -I"$file" + shift + ;; + -I*) + func_file_conv "${1#-I}" mingw + set x "$@" -I"$file" + shift + ;; + -l) + eat=1 + func_cl_dashl "$2" + set x "$@" "$lib" + shift + ;; + -l*) + func_cl_dashl "${1#-l}" + set x "$@" "$lib" + shift + ;; + -L) + eat=1 + func_cl_dashL "$2" + ;; + -L*) + func_cl_dashL "${1#-L}" + ;; + -static) + shared=false + ;; + -Wl,*) + arg=${1#-Wl,} + save_ifs="$IFS"; IFS=',' + for flag in $arg; do + IFS="$save_ifs" + linker_opts="$linker_opts $flag" + done + IFS="$save_ifs" + ;; + -Xlinker) + eat=1 + linker_opts="$linker_opts $2" + ;; + -*) + set x "$@" "$1" + shift + ;; + *.cc | *.CC | *.cxx | *.CXX | *.[cC]++) + func_file_conv "$1" + set x "$@" -Tp"$file" + shift + ;; + *.c | *.cpp | *.CPP | *.lib | *.LIB | *.Lib | *.OBJ | *.obj | *.[oO]) + func_file_conv "$1" mingw + set x "$@" "$file" + shift + ;; + *) + set x "$@" "$1" + shift + ;; + esac + fi + shift + done + if test -n "$linker_opts"; then + linker_opts="-link$linker_opts" + fi + exec "$@" $linker_opts + exit 1 +} + +eat= + +case $1 in + '') + echo "$0: No command. Try '$0 --help' for more information." 1>&2 + exit 1; + ;; + -h | --h*) + cat <<\EOF +Usage: compile [--help] [--version] PROGRAM [ARGS] + +Wrapper for compilers which do not understand '-c -o'. +Remove '-o dest.o' from ARGS, run PROGRAM with the remaining +arguments, and rename the output as expected. + +If you are trying to build a whole package this is not the +right script to run: please start by reading the file 'INSTALL'. + +Report bugs to . +EOF + exit $? + ;; + -v | --v*) + echo "compile $scriptversion" + exit $? + ;; + cl | *[/\\]cl | cl.exe | *[/\\]cl.exe ) + func_cl_wrapper "$@" # Doesn't return... 
+ ;; +esac + +ofile= +cfile= + +for arg +do + if test -n "$eat"; then + eat= + else + case $1 in + -o) + # configure might choose to run compile as 'compile cc -o foo foo.c'. + # So we strip '-o arg' only if arg is an object. + eat=1 + case $2 in + *.o | *.obj) + ofile=$2 + ;; + *) + set x "$@" -o "$2" + shift + ;; + esac + ;; + *.c) + cfile=$1 + set x "$@" "$1" + shift + ;; + *) + set x "$@" "$1" + shift + ;; + esac + fi + shift +done + +if test -z "$ofile" || test -z "$cfile"; then + # If no '-o' option was seen then we might have been invoked from a + # pattern rule where we don't need one. That is ok -- this is a + # normal compilation that the losing compiler can handle. If no + # '.c' file was seen then we are probably linking. That is also + # ok. + exec "$@" +fi + +# Name of file we expect compiler to create. +cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'` + +# Create the lock directory. +# Note: use '[/\\:.-]' here to ensure that we don't use the same name +# that we are using for the .o file. Also, base the name on the expected +# object file name, since that is what matters with a parallel build. +lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d +while true; do + if mkdir "$lockdir" >/dev/null 2>&1; then + break + fi + sleep 1 +done +# FIXME: race condition here if user kills between mkdir and trap. +trap "rmdir '$lockdir'; exit 1" 1 2 15 + +# Run the compile. +"$@" +ret=$? 
+ +if test -f "$cofile"; then + test "$cofile" = "$ofile" || mv "$cofile" "$ofile" +elif test -f "${cofile}bj"; then + test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile" +fi + +rmdir "$lockdir" +exit $ret + +# Local Variables: +# mode: shell-script +# sh-indentation: 2 +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "scriptversion=" +# time-stamp-format: "%:y-%02m-%02d.%02H" +# time-stamp-time-zone: "UTC" +# time-stamp-end: "; # UTC" +# End: diff --git a/vendor/libssh2/config.guess b/vendor/libssh2/config.guess new file mode 100755 index 000000000..d622a44e5 --- /dev/null +++ b/vendor/libssh2/config.guess @@ -0,0 +1,1530 @@ +#! /bin/sh +# Attempt to guess a canonical system name. +# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, +# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +# 2011, 2012 Free Software Foundation, Inc. + +timestamp='2012-02-10' + +# This file is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, see . +# +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that program. + + +# Originally written by Per Bothner. Please send patches (context +# diff format) to and include a ChangeLog +# entry. 
+# +# This script attempts to guess a canonical system name similar to +# config.sub. If it succeeds, it prints the system name on stdout, and +# exits with 0. Otherwise, it exits with 1. +# +# You can get the latest version of this script from: +# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD + +me=`echo "$0" | sed -e 's,.*/,,'` + +usage="\ +Usage: $0 [OPTION] + +Output the configuration name of the system \`$me' is run on. + +Operation modes: + -h, --help print this help, then exit + -t, --time-stamp print date of last modification, then exit + -v, --version print version number, then exit + +Report bugs and patches to ." + +version="\ +GNU config.guess ($timestamp) + +Originally written by Per Bothner. +Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, +2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 +Free Software Foundation, Inc. + +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." + +help=" +Try \`$me --help' for more information." + +# Parse command line +while test $# -gt 0 ; do + case $1 in + --time-stamp | --time* | -t ) + echo "$timestamp" ; exit ;; + --version | -v ) + echo "$version" ; exit ;; + --help | --h* | -h ) + echo "$usage"; exit ;; + -- ) # Stop option processing + shift; break ;; + - ) # Use stdin as input. + break ;; + -* ) + echo "$me: invalid option $1$help" >&2 + exit 1 ;; + * ) + break ;; + esac +done + +if test $# != 0; then + echo "$me: too many arguments$help" >&2 + exit 1 +fi + +trap 'exit 1' 1 2 15 + +# CC_FOR_BUILD -- compiler used by this script. Note that the use of a +# compiler to aid in system detection is discouraged as it requires +# temporary files to be created and, as you can see below, it is a +# headache to deal with in a portable fashion. + +# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. 
We still +# use `HOST_CC' if defined, but it is deprecated. + +# Portable tmp directory creation inspired by the Autoconf team. + +set_cc_for_build=' +trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; +trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; +: ${TMPDIR=/tmp} ; + { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || + { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || + { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || + { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; +dummy=$tmp/dummy ; +tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; +case $CC_FOR_BUILD,$HOST_CC,$CC in + ,,) echo "int x;" > $dummy.c ; + for c in cc gcc c89 c99 ; do + if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then + CC_FOR_BUILD="$c"; break ; + fi ; + done ; + if test x"$CC_FOR_BUILD" = x ; then + CC_FOR_BUILD=no_compiler_found ; + fi + ;; + ,,*) CC_FOR_BUILD=$CC ;; + ,*,*) CC_FOR_BUILD=$HOST_CC ;; +esac ; set_cc_for_build= ;' + +# This is needed to find uname on a Pyramid OSx when run in the BSD universe. +# (ghazi@noc.rutgers.edu 1994-08-24) +if (test -f /.attbin/uname) >/dev/null 2>&1 ; then + PATH=$PATH:/.attbin ; export PATH +fi + +UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown +UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown +UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown +UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown + +# Note: order is significant - the case branches are not exclusive. + +case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in + *:NetBSD:*:*) + # NetBSD (nbsd) targets should (where applicable) match one or + # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, + # *-*-netbsdecoff* and *-*-netbsd*. 
For targets that recently + # switched to ELF, *-*-netbsd* would select the old + # object file format. This provides both forward + # compatibility and a consistent mechanism for selecting the + # object file format. + # + # Note: NetBSD doesn't particularly care about the vendor + # portion of the name. We always set it to "unknown". + sysctl="sysctl -n hw.machine_arch" + UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \ + /usr/sbin/$sysctl 2>/dev/null || echo unknown)` + case "${UNAME_MACHINE_ARCH}" in + armeb) machine=armeb-unknown ;; + arm*) machine=arm-unknown ;; + sh3el) machine=shl-unknown ;; + sh3eb) machine=sh-unknown ;; + sh5el) machine=sh5le-unknown ;; + *) machine=${UNAME_MACHINE_ARCH}-unknown ;; + esac + # The Operating System including object format, if it has switched + # to ELF recently, or will in the future. + case "${UNAME_MACHINE_ARCH}" in + arm*|i386|m68k|ns32k|sh3*|sparc|vax) + eval $set_cc_for_build + if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ELF__ + then + # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). + # Return netbsd for either. FIX? + os=netbsd + else + os=netbsdelf + fi + ;; + *) + os=netbsd + ;; + esac + # The OS release + # Debian GNU/NetBSD machines have a different userland, and + # thus, need a distinct triplet. However, they do not need + # kernel version information, so it can be replaced with a + # suitable tag, in the style of linux-gnu. + case "${UNAME_VERSION}" in + Debian*) + release='-gnu' + ;; + *) + release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'` + ;; + esac + # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: + # contains redundant information, the shorter form: + # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. 
+ echo "${machine}-${os}${release}" + exit ;; + *:OpenBSD:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` + echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} + exit ;; + *:ekkoBSD:*:*) + echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} + exit ;; + *:SolidBSD:*:*) + echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} + exit ;; + macppc:MirBSD:*:*) + echo powerpc-unknown-mirbsd${UNAME_RELEASE} + exit ;; + *:MirBSD:*:*) + echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} + exit ;; + alpha:OSF1:*:*) + case $UNAME_RELEASE in + *4.0) + UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` + ;; + *5.*) + UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` + ;; + esac + # According to Compaq, /usr/sbin/psrinfo has been available on + # OSF/1 and Tru64 systems produced since 1995. I hope that + # covers most systems running today. This code pipes the CPU + # types through head -n 1, so we only detect the type of CPU 0. + ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` + case "$ALPHA_CPU_TYPE" in + "EV4 (21064)") + UNAME_MACHINE="alpha" ;; + "EV4.5 (21064)") + UNAME_MACHINE="alpha" ;; + "LCA4 (21066/21068)") + UNAME_MACHINE="alpha" ;; + "EV5 (21164)") + UNAME_MACHINE="alphaev5" ;; + "EV5.6 (21164A)") + UNAME_MACHINE="alphaev56" ;; + "EV5.6 (21164PC)") + UNAME_MACHINE="alphapca56" ;; + "EV5.7 (21164PC)") + UNAME_MACHINE="alphapca57" ;; + "EV6 (21264)") + UNAME_MACHINE="alphaev6" ;; + "EV6.7 (21264A)") + UNAME_MACHINE="alphaev67" ;; + "EV6.8CB (21264C)") + UNAME_MACHINE="alphaev68" ;; + "EV6.8AL (21264B)") + UNAME_MACHINE="alphaev68" ;; + "EV6.8CX (21264D)") + UNAME_MACHINE="alphaev68" ;; + "EV6.9A (21264/EV69A)") + UNAME_MACHINE="alphaev69" ;; + "EV7 (21364)") + UNAME_MACHINE="alphaev7" ;; + "EV7.9 (21364A)") + UNAME_MACHINE="alphaev79" ;; + esac + # A Pn.n version is a patched version. + # A Vn.n version is a released version. + # A Tn.n version is a released field test version. 
+ # A Xn.n version is an unreleased experimental baselevel. + # 1.2 uses "1.2" for uname -r. + echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` + # Reset EXIT trap before exiting to avoid spurious non-zero exit code. + exitcode=$? + trap '' 0 + exit $exitcode ;; + Alpha\ *:Windows_NT*:*) + # How do we know it's Interix rather than the generic POSIX subsystem? + # Should we change UNAME_MACHINE based on the output of uname instead + # of the specific Alpha model? + echo alpha-pc-interix + exit ;; + 21064:Windows_NT:50:3) + echo alpha-dec-winnt3.5 + exit ;; + Amiga*:UNIX_System_V:4.0:*) + echo m68k-unknown-sysv4 + exit ;; + *:[Aa]miga[Oo][Ss]:*:*) + echo ${UNAME_MACHINE}-unknown-amigaos + exit ;; + *:[Mm]orph[Oo][Ss]:*:*) + echo ${UNAME_MACHINE}-unknown-morphos + exit ;; + *:OS/390:*:*) + echo i370-ibm-openedition + exit ;; + *:z/VM:*:*) + echo s390-ibm-zvmoe + exit ;; + *:OS400:*:*) + echo powerpc-ibm-os400 + exit ;; + arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) + echo arm-acorn-riscix${UNAME_RELEASE} + exit ;; + arm:riscos:*:*|arm:RISCOS:*:*) + echo arm-unknown-riscos + exit ;; + SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) + echo hppa1.1-hitachi-hiuxmpp + exit ;; + Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) + # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. 
+ if test "`(/bin/universe) 2>/dev/null`" = att ; then + echo pyramid-pyramid-sysv3 + else + echo pyramid-pyramid-bsd + fi + exit ;; + NILE*:*:*:dcosx) + echo pyramid-pyramid-svr4 + exit ;; + DRS?6000:unix:4.0:6*) + echo sparc-icl-nx6 + exit ;; + DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) + case `/usr/bin/uname -p` in + sparc) echo sparc-icl-nx7; exit ;; + esac ;; + s390x:SunOS:*:*) + echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4H:SunOS:5.*:*) + echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) + echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) + echo i386-pc-auroraux${UNAME_RELEASE} + exit ;; + i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) + eval $set_cc_for_build + SUN_ARCH="i386" + # If there is a compiler, see if it is configured for 64-bit objects. + # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. + # This test works for both compilers. + if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then + if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + SUN_ARCH="x86_64" + fi + fi + echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4*:SunOS:6*:*) + # According to config.sub, this is the proper way to canonicalize + # SunOS6. Hard to guess exactly what SunOS6 will be like, but + # it's likely to be more like Solaris than SunOS4. + echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4*:SunOS:*:*) + case "`/usr/bin/arch -k`" in + Series*|S4*) + UNAME_RELEASE=`uname -v` + ;; + esac + # Japanese Language versions have a version number like `4.1.3-JL'. 
+ echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` + exit ;; + sun3*:SunOS:*:*) + echo m68k-sun-sunos${UNAME_RELEASE} + exit ;; + sun*:*:4.2BSD:*) + UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` + test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3 + case "`/bin/arch`" in + sun3) + echo m68k-sun-sunos${UNAME_RELEASE} + ;; + sun4) + echo sparc-sun-sunos${UNAME_RELEASE} + ;; + esac + exit ;; + aushp:SunOS:*:*) + echo sparc-auspex-sunos${UNAME_RELEASE} + exit ;; + # The situation for MiNT is a little confusing. The machine name + # can be virtually everything (everything which is not + # "atarist" or "atariste" at least should have a processor + # > m68000). The system name ranges from "MiNT" over "FreeMiNT" + # to the lowercase version "mint" (or "freemint"). Finally + # the system name "TOS" denotes a system which is actually not + # MiNT. But MiNT is downward compatible to TOS, so this should + # be no problem. + atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) + echo m68k-atari-mint${UNAME_RELEASE} + exit ;; + atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) + echo m68k-atari-mint${UNAME_RELEASE} + exit ;; + *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) + echo m68k-atari-mint${UNAME_RELEASE} + exit ;; + milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) + echo m68k-milan-mint${UNAME_RELEASE} + exit ;; + hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) + echo m68k-hades-mint${UNAME_RELEASE} + exit ;; + *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) + echo m68k-unknown-mint${UNAME_RELEASE} + exit ;; + m68k:machten:*:*) + echo m68k-apple-machten${UNAME_RELEASE} + exit ;; + powerpc:machten:*:*) + echo powerpc-apple-machten${UNAME_RELEASE} + exit ;; + RISC*:Mach:*:*) + echo mips-dec-mach_bsd4.3 + exit ;; + RISC*:ULTRIX:*:*) + echo mips-dec-ultrix${UNAME_RELEASE} + exit ;; + VAX*:ULTRIX*:*:*) + echo vax-dec-ultrix${UNAME_RELEASE} + exit ;; + 2020:CLIX:*:* | 2430:CLIX:*:*) + echo 
clipper-intergraph-clix${UNAME_RELEASE} + exit ;; + mips:*:*:UMIPS | mips:*:*:RISCos) + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c +#ifdef __cplusplus +#include /* for printf() prototype */ + int main (int argc, char *argv[]) { +#else + int main (argc, argv) int argc; char *argv[]; { +#endif + #if defined (host_mips) && defined (MIPSEB) + #if defined (SYSTYPE_SYSV) + printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); + #endif + #if defined (SYSTYPE_SVR4) + printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); + #endif + #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) + printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); + #endif + #endif + exit (-1); + } +EOF + $CC_FOR_BUILD -o $dummy $dummy.c && + dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && + SYSTEM_NAME=`$dummy $dummyarg` && + { echo "$SYSTEM_NAME"; exit; } + echo mips-mips-riscos${UNAME_RELEASE} + exit ;; + Motorola:PowerMAX_OS:*:*) + echo powerpc-motorola-powermax + exit ;; + Motorola:*:4.3:PL8-*) + echo powerpc-harris-powermax + exit ;; + Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) + echo powerpc-harris-powermax + exit ;; + Night_Hawk:Power_UNIX:*:*) + echo powerpc-harris-powerunix + exit ;; + m88k:CX/UX:7*:*) + echo m88k-harris-cxux7 + exit ;; + m88k:*:4*:R4*) + echo m88k-motorola-sysv4 + exit ;; + m88k:*:3*:R3*) + echo m88k-motorola-sysv3 + exit ;; + AViiON:dgux:*:*) + # DG/UX returns AViiON for all architectures + UNAME_PROCESSOR=`/usr/bin/uname -p` + if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] + then + if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ + [ ${TARGET_BINARY_INTERFACE}x = x ] + then + echo m88k-dg-dgux${UNAME_RELEASE} + else + echo m88k-dg-dguxbcs${UNAME_RELEASE} + fi + else + echo i586-dg-dgux${UNAME_RELEASE} + fi + exit ;; + M88*:DolphinOS:*:*) # DolphinOS (SVR3) + echo m88k-dolphin-sysv3 + exit ;; + M88*:*:R3*:*) + # Delta 88k system running SVR3 + echo m88k-motorola-sysv3 + exit ;; + XD88*:*:*:*) # 
Tektronix XD88 system running UTekV (SVR3) + echo m88k-tektronix-sysv3 + exit ;; + Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) + echo m68k-tektronix-bsd + exit ;; + *:IRIX*:*:*) + echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` + exit ;; + ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. + echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id + exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' + i*86:AIX:*:*) + echo i386-ibm-aix + exit ;; + ia64:AIX:*:*) + if [ -x /usr/bin/oslevel ] ; then + IBM_REV=`/usr/bin/oslevel` + else + IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} + fi + echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} + exit ;; + *:AIX:2:3) + if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #include + + main() + { + if (!__power_pc()) + exit(1); + puts("powerpc-ibm-aix3.2.5"); + exit(0); + } +EOF + if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` + then + echo "$SYSTEM_NAME" + else + echo rs6000-ibm-aix3.2.5 + fi + elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then + echo rs6000-ibm-aix3.2.4 + else + echo rs6000-ibm-aix3.2 + fi + exit ;; + *:AIX:*:[4567]) + IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` + if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then + IBM_ARCH=rs6000 + else + IBM_ARCH=powerpc + fi + if [ -x /usr/bin/oslevel ] ; then + IBM_REV=`/usr/bin/oslevel` + else + IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} + fi + echo ${IBM_ARCH}-ibm-aix${IBM_REV} + exit ;; + *:AIX:*:*) + echo rs6000-ibm-aix + exit ;; + ibmrt:4.4BSD:*|romp-ibm:BSD:*) + echo romp-ibm-bsd4.4 + exit ;; + ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and + echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to + exit ;; # report: romp-ibm BSD 4.3 + *:BOSX:*:*) + echo rs6000-bull-bosx + exit ;; + DPX/2?00:B.O.S.:*:*) + echo m68k-bull-sysv3 + exit ;; + 9000/[34]??:4.3bsd:1.*:*) + echo 
m68k-hp-bsd + exit ;; + hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) + echo m68k-hp-bsd4.4 + exit ;; + 9000/[34678]??:HP-UX:*:*) + HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` + case "${UNAME_MACHINE}" in + 9000/31? ) HP_ARCH=m68000 ;; + 9000/[34]?? ) HP_ARCH=m68k ;; + 9000/[678][0-9][0-9]) + if [ -x /usr/bin/getconf ]; then + sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` + sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` + case "${sc_cpu_version}" in + 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0 + 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1 + 532) # CPU_PA_RISC2_0 + case "${sc_kernel_bits}" in + 32) HP_ARCH="hppa2.0n" ;; + 64) HP_ARCH="hppa2.0w" ;; + '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20 + esac ;; + esac + fi + if [ "${HP_ARCH}" = "" ]; then + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + + #define _HPUX_SOURCE + #include + #include + + int main () + { + #if defined(_SC_KERNEL_BITS) + long bits = sysconf(_SC_KERNEL_BITS); + #endif + long cpu = sysconf (_SC_CPU_VERSION); + + switch (cpu) + { + case CPU_PA_RISC1_0: puts ("hppa1.0"); break; + case CPU_PA_RISC1_1: puts ("hppa1.1"); break; + case CPU_PA_RISC2_0: + #if defined(_SC_KERNEL_BITS) + switch (bits) + { + case 64: puts ("hppa2.0w"); break; + case 32: puts ("hppa2.0n"); break; + default: puts ("hppa2.0"); break; + } break; + #else /* !defined(_SC_KERNEL_BITS) */ + puts ("hppa2.0"); break; + #endif + default: puts ("hppa1.0"); break; + } + exit (0); + } +EOF + (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` + test -z "$HP_ARCH" && HP_ARCH=hppa + fi ;; + esac + if [ ${HP_ARCH} = "hppa2.0w" ] + then + eval $set_cc_for_build + + # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating + # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler + # generating 64-bit code. 
GNU and HP use different nomenclature: + # + # $ CC_FOR_BUILD=cc ./config.guess + # => hppa2.0w-hp-hpux11.23 + # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess + # => hppa64-hp-hpux11.23 + + if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | + grep -q __LP64__ + then + HP_ARCH="hppa2.0w" + else + HP_ARCH="hppa64" + fi + fi + echo ${HP_ARCH}-hp-hpux${HPUX_REV} + exit ;; + ia64:HP-UX:*:*) + HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` + echo ia64-hp-hpux${HPUX_REV} + exit ;; + 3050*:HI-UX:*:*) + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #include + int + main () + { + long cpu = sysconf (_SC_CPU_VERSION); + /* The order matters, because CPU_IS_HP_MC68K erroneously returns + true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct + results, however. */ + if (CPU_IS_PA_RISC (cpu)) + { + switch (cpu) + { + case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; + case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; + case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; + default: puts ("hppa-hitachi-hiuxwe2"); break; + } + } + else if (CPU_IS_HP_MC68K (cpu)) + puts ("m68k-hitachi-hiuxwe2"); + else puts ("unknown-hitachi-hiuxwe2"); + exit (0); + } +EOF + $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && + { echo "$SYSTEM_NAME"; exit; } + echo unknown-hitachi-hiuxwe2 + exit ;; + 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) + echo hppa1.1-hp-bsd + exit ;; + 9000/8??:4.3bsd:*:*) + echo hppa1.0-hp-bsd + exit ;; + *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) + echo hppa1.0-hp-mpeix + exit ;; + hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) + echo hppa1.1-hp-osf + exit ;; + hp8??:OSF1:*:*) + echo hppa1.0-hp-osf + exit ;; + i*86:OSF1:*:*) + if [ -x /usr/sbin/sysversion ] ; then + echo ${UNAME_MACHINE}-unknown-osf1mk + else + echo ${UNAME_MACHINE}-unknown-osf1 + fi + exit ;; + parisc*:Lites*:*:*) + echo hppa1.1-hp-lites + exit ;; + C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) + echo c1-convex-bsd + exit ;; + C2*:ConvexOS:*:* | 
convex:ConvexOS:C2*:*) + if getsysinfo -f scalar_acc + then echo c32-convex-bsd + else echo c2-convex-bsd + fi + exit ;; + C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) + echo c34-convex-bsd + exit ;; + C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) + echo c38-convex-bsd + exit ;; + C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) + echo c4-convex-bsd + exit ;; + CRAY*Y-MP:*:*:*) + echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*[A-Z]90:*:*:*) + echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ + | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ + -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ + -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*TS:*:*:*) + echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*T3E:*:*:*) + echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*SV1:*:*:*) + echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + *:UNICOS/mp:*:*) + echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) + FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` + FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` + FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` + echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" + exit ;; + 5000:UNIX_System_V:4.*:*) + FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` + FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'` + echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" + exit ;; + i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) + echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} + exit ;; + sparc*:BSD/OS:*:*) + echo sparc-unknown-bsdi${UNAME_RELEASE} + exit ;; + *:BSD/OS:*:*) + echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} 
+ exit ;; + *:FreeBSD:*:*) + UNAME_PROCESSOR=`/usr/bin/uname -p` + case ${UNAME_PROCESSOR} in + amd64) + echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; + *) + echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; + esac + exit ;; + i*:CYGWIN*:*) + echo ${UNAME_MACHINE}-pc-cygwin + exit ;; + *:MINGW*:*) + echo ${UNAME_MACHINE}-pc-mingw32 + exit ;; + i*:MSYS*:*) + echo ${UNAME_MACHINE}-pc-msys + exit ;; + i*:windows32*:*) + # uname -m includes "-pc" on this system. + echo ${UNAME_MACHINE}-mingw32 + exit ;; + i*:PW*:*) + echo ${UNAME_MACHINE}-pc-pw32 + exit ;; + *:Interix*:*) + case ${UNAME_MACHINE} in + x86) + echo i586-pc-interix${UNAME_RELEASE} + exit ;; + authenticamd | genuineintel | EM64T) + echo x86_64-unknown-interix${UNAME_RELEASE} + exit ;; + IA64) + echo ia64-unknown-interix${UNAME_RELEASE} + exit ;; + esac ;; + [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) + echo i${UNAME_MACHINE}-pc-mks + exit ;; + 8664:Windows_NT:*) + echo x86_64-pc-mks + exit ;; + i*:Windows_NT*:* | Pentium*:Windows_NT*:*) + # How do we know it's Interix rather than the generic POSIX subsystem? + # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we + # UNAME_MACHINE based on the output of uname instead of i386? 
+ echo i586-pc-interix + exit ;; + i*:UWIN*:*) + echo ${UNAME_MACHINE}-pc-uwin + exit ;; + amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) + echo x86_64-unknown-cygwin + exit ;; + p*:CYGWIN*:*) + echo powerpcle-unknown-cygwin + exit ;; + prep*:SunOS:5.*:*) + echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + *:GNU:*:*) + # the GNU system + echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` + exit ;; + *:GNU/*:*:*) + # other systems with GNU libc and userland + echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu + exit ;; + i*86:Minix:*:*) + echo ${UNAME_MACHINE}-pc-minix + exit ;; + aarch64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + aarch64_be:Linux:*:*) + UNAME_MACHINE=aarch64_be + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + alpha:Linux:*:*) + case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in + EV5) UNAME_MACHINE=alphaev5 ;; + EV56) UNAME_MACHINE=alphaev56 ;; + PCA56) UNAME_MACHINE=alphapca56 ;; + PCA57) UNAME_MACHINE=alphapca56 ;; + EV6) UNAME_MACHINE=alphaev6 ;; + EV67) UNAME_MACHINE=alphaev67 ;; + EV68*) UNAME_MACHINE=alphaev68 ;; + esac + objdump --private-headers /bin/sh | grep -q ld.so.1 + if test "$?" 
= 0 ; then LIBC="libc1" ; else LIBC="" ; fi + echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC} + exit ;; + arm*:Linux:*:*) + eval $set_cc_for_build + if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_EABI__ + then + echo ${UNAME_MACHINE}-unknown-linux-gnu + else + if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_PCS_VFP + then + echo ${UNAME_MACHINE}-unknown-linux-gnueabi + else + echo ${UNAME_MACHINE}-unknown-linux-gnueabihf + fi + fi + exit ;; + avr32*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + cris:Linux:*:*) + echo ${UNAME_MACHINE}-axis-linux-gnu + exit ;; + crisv32:Linux:*:*) + echo ${UNAME_MACHINE}-axis-linux-gnu + exit ;; + frv:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + hexagon:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + i*86:Linux:*:*) + LIBC=gnu + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #ifdef __dietlibc__ + LIBC=dietlibc + #endif +EOF + eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'` + echo "${UNAME_MACHINE}-pc-linux-${LIBC}" + exit ;; + ia64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + m32r*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + m68*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + mips:Linux:*:* | mips64:Linux:*:*) + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #undef CPU + #undef ${UNAME_MACHINE} + #undef ${UNAME_MACHINE}el + #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) + CPU=${UNAME_MACHINE}el + #else + #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) + CPU=${UNAME_MACHINE} + #else + CPU= + #endif + #endif +EOF + eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'` + test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } + ;; + or32:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + padre:Linux:*:*) + echo sparc-unknown-linux-gnu + 
exit ;; + parisc64:Linux:*:* | hppa64:Linux:*:*) + echo hppa64-unknown-linux-gnu + exit ;; + parisc:Linux:*:* | hppa:Linux:*:*) + # Look for CPU level + case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in + PA7*) echo hppa1.1-unknown-linux-gnu ;; + PA8*) echo hppa2.0-unknown-linux-gnu ;; + *) echo hppa-unknown-linux-gnu ;; + esac + exit ;; + ppc64:Linux:*:*) + echo powerpc64-unknown-linux-gnu + exit ;; + ppc:Linux:*:*) + echo powerpc-unknown-linux-gnu + exit ;; + s390:Linux:*:* | s390x:Linux:*:*) + echo ${UNAME_MACHINE}-ibm-linux + exit ;; + sh64*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + sh*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + sparc:Linux:*:* | sparc64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + tile*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + vax:Linux:*:*) + echo ${UNAME_MACHINE}-dec-linux-gnu + exit ;; + x86_64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + xtensa*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-gnu + exit ;; + i*86:DYNIX/ptx:4*:*) + # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. + # earlier versions are messed up and put the nodename in both + # sysname and nodename. + echo i386-sequent-sysv4 + exit ;; + i*86:UNIX_SV:4.2MP:2.*) + # Unixware is an offshoot of SVR4, but it has its own version + # number series starting with 2... + # I am not positive that other SVR4 systems won't match this, + # I just have to hope. -- rms. + # Use sysv4.2uw... so that sysv4* matches it. + echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} + exit ;; + i*86:OS/2:*:*) + # If we were able to find `uname', then EMX Unix compatibility + # is probably installed. 
+ echo ${UNAME_MACHINE}-pc-os2-emx + exit ;; + i*86:XTS-300:*:STOP) + echo ${UNAME_MACHINE}-unknown-stop + exit ;; + i*86:atheos:*:*) + echo ${UNAME_MACHINE}-unknown-atheos + exit ;; + i*86:syllable:*:*) + echo ${UNAME_MACHINE}-pc-syllable + exit ;; + i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) + echo i386-unknown-lynxos${UNAME_RELEASE} + exit ;; + i*86:*DOS:*:*) + echo ${UNAME_MACHINE}-pc-msdosdjgpp + exit ;; + i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) + UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` + if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then + echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} + else + echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} + fi + exit ;; + i*86:*:5:[678]*) + # UnixWare 7.x, OpenUNIX and OpenServer 6. + case `/bin/uname -X | grep "^Machine"` in + *486*) UNAME_MACHINE=i486 ;; + *Pentium) UNAME_MACHINE=i586 ;; + *Pent*|*Celeron) UNAME_MACHINE=i686 ;; + esac + echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} + exit ;; + i*86:*:3.2:*) + if test -f /usr/options/cb.name; then + UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then + UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` + (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 + (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ + && UNAME_MACHINE=i586 + (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ + && UNAME_MACHINE=i686 + (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ + && UNAME_MACHINE=i686 + echo ${UNAME_MACHINE}-pc-sco$UNAME_REL + else + echo ${UNAME_MACHINE}-pc-sysv32 + fi + exit ;; + pc:*:*:*) + # Left here for compatibility: + # uname -m prints for DJGPP always 'pc', but it prints nothing about + # the processor, so we play safe by assuming i586. + # Note: whatever this is, it MUST be the same as what config.sub + # prints for the "djgpp" host, or else GDB configury will decide that + # this is a cross-build. 
+ echo i586-pc-msdosdjgpp + exit ;; + Intel:Mach:3*:*) + echo i386-pc-mach3 + exit ;; + paragon:*:*:*) + echo i860-intel-osf1 + exit ;; + i860:*:4.*:*) # i860-SVR4 + if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then + echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 + else # Add other i860-SVR4 vendors below as they are discovered. + echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 + fi + exit ;; + mini*:CTIX:SYS*5:*) + # "miniframe" + echo m68010-convergent-sysv + exit ;; + mc68k:UNIX:SYSTEM5:3.51m) + echo m68k-convergent-sysv + exit ;; + M680?0:D-NIX:5.3:*) + echo m68k-diab-dnix + exit ;; + M68*:*:R3V[5678]*:*) + test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; + 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) + OS_REL='' + test -r /etc/.relid \ + && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; + 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4; exit; } ;; + NCR*:*:4.2:* | MPRAS*:*:4.2:*) + OS_REL='.3' + test -r /etc/.relid \ + && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; + m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) + echo m68k-unknown-lynxos${UNAME_RELEASE} + exit ;; + mc68030:UNIX_System_V:4.*:*) + echo m68k-atari-sysv4 + exit ;; + 
TSUNAMI:LynxOS:2.*:*) + echo sparc-unknown-lynxos${UNAME_RELEASE} + exit ;; + rs6000:LynxOS:2.*:*) + echo rs6000-unknown-lynxos${UNAME_RELEASE} + exit ;; + PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) + echo powerpc-unknown-lynxos${UNAME_RELEASE} + exit ;; + SM[BE]S:UNIX_SV:*:*) + echo mips-dde-sysv${UNAME_RELEASE} + exit ;; + RM*:ReliantUNIX-*:*:*) + echo mips-sni-sysv4 + exit ;; + RM*:SINIX-*:*:*) + echo mips-sni-sysv4 + exit ;; + *:SINIX-*:*:*) + if uname -p 2>/dev/null >/dev/null ; then + UNAME_MACHINE=`(uname -p) 2>/dev/null` + echo ${UNAME_MACHINE}-sni-sysv4 + else + echo ns32k-sni-sysv + fi + exit ;; + PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort + # says + echo i586-unisys-sysv4 + exit ;; + *:UNIX_System_V:4*:FTX*) + # From Gerald Hewes . + # How about differentiating between stratus architectures? -djm + echo hppa1.1-stratus-sysv4 + exit ;; + *:*:*:FTX*) + # From seanf@swdc.stratus.com. + echo i860-stratus-sysv4 + exit ;; + i*86:VOS:*:*) + # From Paul.Green@stratus.com. + echo ${UNAME_MACHINE}-stratus-vos + exit ;; + *:VOS:*:*) + # From Paul.Green@stratus.com. + echo hppa1.1-stratus-vos + exit ;; + mc68*:A/UX:*:*) + echo m68k-apple-aux${UNAME_RELEASE} + exit ;; + news*:NEWS-OS:6*:*) + echo mips-sony-newsos6 + exit ;; + R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) + if [ -d /usr/nec ]; then + echo mips-nec-sysv${UNAME_RELEASE} + else + echo mips-unknown-sysv${UNAME_RELEASE} + fi + exit ;; + BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. + echo powerpc-be-beos + exit ;; + BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. + echo powerpc-apple-beos + exit ;; + BePC:BeOS:*:*) # BeOS running on Intel PC compatible. + echo i586-pc-beos + exit ;; + BePC:Haiku:*:*) # Haiku running on Intel PC compatible. 
+ echo i586-pc-haiku + exit ;; + SX-4:SUPER-UX:*:*) + echo sx4-nec-superux${UNAME_RELEASE} + exit ;; + SX-5:SUPER-UX:*:*) + echo sx5-nec-superux${UNAME_RELEASE} + exit ;; + SX-6:SUPER-UX:*:*) + echo sx6-nec-superux${UNAME_RELEASE} + exit ;; + SX-7:SUPER-UX:*:*) + echo sx7-nec-superux${UNAME_RELEASE} + exit ;; + SX-8:SUPER-UX:*:*) + echo sx8-nec-superux${UNAME_RELEASE} + exit ;; + SX-8R:SUPER-UX:*:*) + echo sx8r-nec-superux${UNAME_RELEASE} + exit ;; + Power*:Rhapsody:*:*) + echo powerpc-apple-rhapsody${UNAME_RELEASE} + exit ;; + *:Rhapsody:*:*) + echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} + exit ;; + *:Darwin:*:*) + UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown + case $UNAME_PROCESSOR in + i386) + eval $set_cc_for_build + if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then + if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + UNAME_PROCESSOR="x86_64" + fi + fi ;; + unknown) UNAME_PROCESSOR=powerpc ;; + esac + echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} + exit ;; + *:procnto*:*:* | *:QNX:[0123456789]*:*) + UNAME_PROCESSOR=`uname -p` + if test "$UNAME_PROCESSOR" = "x86"; then + UNAME_PROCESSOR=i386 + UNAME_MACHINE=pc + fi + echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} + exit ;; + *:QNX:*:4*) + echo i386-pc-qnx + exit ;; + NEO-?:NONSTOP_KERNEL:*:*) + echo neo-tandem-nsk${UNAME_RELEASE} + exit ;; + NSE-?:NONSTOP_KERNEL:*:*) + echo nse-tandem-nsk${UNAME_RELEASE} + exit ;; + NSR-?:NONSTOP_KERNEL:*:*) + echo nsr-tandem-nsk${UNAME_RELEASE} + exit ;; + *:NonStop-UX:*:*) + echo mips-compaq-nonstopux + exit ;; + BS2000:POSIX*:*:*) + echo bs2000-siemens-sysv + exit ;; + DS/*:UNIX_System_V:*:*) + echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} + exit ;; + *:Plan9:*:*) + # "uname -m" is not consistent, so use $cputype instead. 386 + # is converted to i386 for consistency with other x86 + # operating systems. 
+ if test "$cputype" = "386"; then + UNAME_MACHINE=i386 + else + UNAME_MACHINE="$cputype" + fi + echo ${UNAME_MACHINE}-unknown-plan9 + exit ;; + *:TOPS-10:*:*) + echo pdp10-unknown-tops10 + exit ;; + *:TENEX:*:*) + echo pdp10-unknown-tenex + exit ;; + KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) + echo pdp10-dec-tops20 + exit ;; + XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) + echo pdp10-xkl-tops20 + exit ;; + *:TOPS-20:*:*) + echo pdp10-unknown-tops20 + exit ;; + *:ITS:*:*) + echo pdp10-unknown-its + exit ;; + SEI:*:*:SEIUX) + echo mips-sei-seiux${UNAME_RELEASE} + exit ;; + *:DragonFly:*:*) + echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` + exit ;; + *:*VMS:*:*) + UNAME_MACHINE=`(uname -p) 2>/dev/null` + case "${UNAME_MACHINE}" in + A*) echo alpha-dec-vms ; exit ;; + I*) echo ia64-dec-vms ; exit ;; + V*) echo vax-dec-vms ; exit ;; + esac ;; + *:XENIX:*:SysV) + echo i386-pc-xenix + exit ;; + i*86:skyos:*:*) + echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//' + exit ;; + i*86:rdos:*:*) + echo ${UNAME_MACHINE}-pc-rdos + exit ;; + i*86:AROS:*:*) + echo ${UNAME_MACHINE}-pc-aros + exit ;; + x86_64:VMkernel:*:*) + echo ${UNAME_MACHINE}-unknown-esx + exit ;; +esac + +#echo '(No uname command or uname output not recognized.)' 1>&2 +#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2 + +eval $set_cc_for_build +cat >$dummy.c < +# include +#endif +main () +{ +#if defined (sony) +#if defined (MIPSEB) + /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed, + I don't know.... 
*/ + printf ("mips-sony-bsd\n"); exit (0); +#else +#include + printf ("m68k-sony-newsos%s\n", +#ifdef NEWSOS4 + "4" +#else + "" +#endif + ); exit (0); +#endif +#endif + +#if defined (__arm) && defined (__acorn) && defined (__unix) + printf ("arm-acorn-riscix\n"); exit (0); +#endif + +#if defined (hp300) && !defined (hpux) + printf ("m68k-hp-bsd\n"); exit (0); +#endif + +#if defined (NeXT) +#if !defined (__ARCHITECTURE__) +#define __ARCHITECTURE__ "m68k" +#endif + int version; + version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`; + if (version < 4) + printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version); + else + printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version); + exit (0); +#endif + +#if defined (MULTIMAX) || defined (n16) +#if defined (UMAXV) + printf ("ns32k-encore-sysv\n"); exit (0); +#else +#if defined (CMU) + printf ("ns32k-encore-mach\n"); exit (0); +#else + printf ("ns32k-encore-bsd\n"); exit (0); +#endif +#endif +#endif + +#if defined (__386BSD__) + printf ("i386-pc-bsd\n"); exit (0); +#endif + +#if defined (sequent) +#if defined (i386) + printf ("i386-sequent-dynix\n"); exit (0); +#endif +#if defined (ns32000) + printf ("ns32k-sequent-dynix\n"); exit (0); +#endif +#endif + +#if defined (_SEQUENT_) + struct utsname un; + + uname(&un); + + if (strncmp(un.version, "V2", 2) == 0) { + printf ("i386-sequent-ptx2\n"); exit (0); + } + if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? 
*/ + printf ("i386-sequent-ptx1\n"); exit (0); + } + printf ("i386-sequent-ptx\n"); exit (0); + +#endif + +#if defined (vax) +# if !defined (ultrix) +# include +# if defined (BSD) +# if BSD == 43 + printf ("vax-dec-bsd4.3\n"); exit (0); +# else +# if BSD == 199006 + printf ("vax-dec-bsd4.3reno\n"); exit (0); +# else + printf ("vax-dec-bsd\n"); exit (0); +# endif +# endif +# else + printf ("vax-dec-bsd\n"); exit (0); +# endif +# else + printf ("vax-dec-ultrix\n"); exit (0); +# endif +#endif + +#if defined (alliant) && defined (i860) + printf ("i860-alliant-bsd\n"); exit (0); +#endif + + exit (1); +} +EOF + +$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` && + { echo "$SYSTEM_NAME"; exit; } + +# Apollos put the system type in the environment. + +test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; } + +# Convex versions that predate uname can use getsysinfo(1) + +if [ -x /usr/convex/getsysinfo ] +then + case `getsysinfo -f cpu_type` in + c1*) + echo c1-convex-bsd + exit ;; + c2*) + if getsysinfo -f scalar_acc + then echo c32-convex-bsd + else echo c2-convex-bsd + fi + exit ;; + c34*) + echo c34-convex-bsd + exit ;; + c38*) + echo c38-convex-bsd + exit ;; + c4*) + echo c4-convex-bsd + exit ;; + esac +fi + +cat >&2 < in order to provide the needed +information to handle your system. 
+ +config.guess timestamp = $timestamp + +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null` + +hostinfo = `(hostinfo) 2>/dev/null` +/bin/universe = `(/bin/universe) 2>/dev/null` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` +/bin/arch = `(/bin/arch) 2>/dev/null` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` + +UNAME_MACHINE = ${UNAME_MACHINE} +UNAME_RELEASE = ${UNAME_RELEASE} +UNAME_SYSTEM = ${UNAME_SYSTEM} +UNAME_VERSION = ${UNAME_VERSION} +EOF + +exit 1 + +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "timestamp='" +# time-stamp-format: "%:y-%02m-%02d" +# time-stamp-end: "'" +# End: diff --git a/vendor/libssh2/config.rpath b/vendor/libssh2/config.rpath new file mode 100755 index 000000000..e082db6bb --- /dev/null +++ b/vendor/libssh2/config.rpath @@ -0,0 +1,660 @@ +#! /bin/sh +# Output a system dependent set of variables, describing how to set the +# run time search path of shared libraries in an executable. +# +# Copyright 1996-2006 Free Software Foundation, Inc. +# Taken from GNU libtool, 2001 +# Originally by Gordon Matzigkeit , 1996 +# +# This file is free software; the Free Software Foundation gives +# unlimited permission to copy and/or distribute it, with or without +# modifications, as long as this notice is preserved. +# +# The first argument passed to this file is the canonical host specification, +# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM +# or +# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM +# The environment variables CC, GCC, LDFLAGS, LD, with_gnu_ld +# should be set by the caller. +# +# The set of defined variables is at the end of this script. 
+ +# Known limitations: +# - On IRIX 6.5 with CC="cc", the run time search patch must not be longer +# than 256 bytes, otherwise the compiler driver will dump core. The only +# known workaround is to choose shorter directory names for the build +# directory and/or the installation directory. + +# All known linkers require a `.a' archive for static linking (except MSVC, +# which needs '.lib'). +libext=a +shrext=.so + +host="$1" +host_cpu=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'` +host_vendor=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'` +host_os=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` + +# Code taken from libtool.m4's _LT_CC_BASENAME. + +for cc_temp in $CC""; do + case $cc_temp in + compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; + distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; + \-*) ;; + *) break;; + esac +done +cc_basename=`echo "$cc_temp" | sed -e 's%^.*/%%'` + +# Code taken from libtool.m4's AC_LIBTOOL_PROG_COMPILER_PIC. + +wl= +if test "$GCC" = yes; then + wl='-Wl,' +else + case "$host_os" in + aix*) + wl='-Wl,' + ;; + darwin*) + case $cc_basename in + xlc*) + wl='-Wl,' + ;; + esac + ;; + mingw* | pw32* | os2*) + ;; + hpux9* | hpux10* | hpux11*) + wl='-Wl,' + ;; + irix5* | irix6* | nonstopux*) + wl='-Wl,' + ;; + newsos6) + ;; + linux*) + case $cc_basename in + icc* | ecc*) + wl='-Wl,' + ;; + pgcc | pgf77 | pgf90) + wl='-Wl,' + ;; + ccc*) + wl='-Wl,' + ;; + como) + wl='-lopt=' + ;; + *) + case `$CC -V 2>&1 | sed 5q` in + *Sun\ C*) + wl='-Wl,' + ;; + esac + ;; + esac + ;; + osf3* | osf4* | osf5*) + wl='-Wl,' + ;; + sco3.2v5*) + ;; + solaris*) + wl='-Wl,' + ;; + sunos4*) + wl='-Qoption ld ' + ;; + sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) + wl='-Wl,' + ;; + sysv4*MP*) + ;; + unicos*) + wl='-Wl,' + ;; + uts4*) + ;; + esac +fi + +# Code taken from libtool.m4's AC_LIBTOOL_PROG_LD_SHLIBS. 
+ +hardcode_libdir_flag_spec= +hardcode_libdir_separator= +hardcode_direct=no +hardcode_minus_L=no + +case "$host_os" in + cygwin* | mingw* | pw32*) + # FIXME: the MSVC++ port hasn't been tested in a loooong time + # When not using gcc, we currently assume that we are using + # Microsoft Visual C++. + if test "$GCC" != yes; then + with_gnu_ld=no + fi + ;; + interix*) + # we just hope/assume this is gcc and not c89 (= MSVC++) + with_gnu_ld=yes + ;; + openbsd*) + with_gnu_ld=no + ;; +esac + +ld_shlibs=yes +if test "$with_gnu_ld" = yes; then + # Set some defaults for GNU ld with shared library support. These + # are reset later if shared libraries are not supported. Putting them + # here allows them to be overridden if necessary. + # Unlike libtool, we use -rpath here, not --rpath, since the documented + # option of GNU ld is called -rpath, not --rpath. + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + case "$host_os" in + aix3* | aix4* | aix5*) + # On AIX/PPC, the GNU linker is very broken + if test "$host_cpu" != ia64; then + ld_shlibs=no + fi + ;; + amigaos*) + hardcode_libdir_flag_spec='-L$libdir' + hardcode_minus_L=yes + # Samuel A. Falvo II reports + # that the semantics of dynamic libraries on AmigaOS, at least up + # to version 4, is to share data among multiple programs linked + # with the same dynamic library. Since this doesn't match the + # behavior of shared libraries on other platforms, we cannot use + # them. + ld_shlibs=no + ;; + beos*) + if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then + : + else + ld_shlibs=no + fi + ;; + cygwin* | mingw* | pw32*) + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. 
+ hardcode_libdir_flag_spec='-L$libdir' + if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then + : + else + ld_shlibs=no + fi + ;; + interix3*) + hardcode_direct=no + hardcode_libdir_flag_spec='${wl}-rpath,$libdir' + ;; + linux*) + if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then + : + else + ld_shlibs=no + fi + ;; + netbsd*) + ;; + solaris*) + if $LD -v 2>&1 | grep 'BFD 2\.8' > /dev/null; then + ld_shlibs=no + elif $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then + : + else + ld_shlibs=no + fi + ;; + sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) + case `$LD -v 2>&1` in + *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) + ld_shlibs=no + ;; + *) + if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then + hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-rpath,$libdir`' + else + ld_shlibs=no + fi + ;; + esac + ;; + sunos4*) + hardcode_direct=yes + ;; + *) + if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then + : + else + ld_shlibs=no + fi + ;; + esac + if test "$ld_shlibs" = no; then + hardcode_libdir_flag_spec= + fi +else + case "$host_os" in + aix3*) + # Note: this linker hardcodes the directories in LIBPATH if there + # are no directories specified by -L. + hardcode_minus_L=yes + if test "$GCC" = yes; then + # Neither direct hardcoding nor static linking is supported with a + # broken collect2. + hardcode_direct=unsupported + fi + ;; + aix4* | aix5*) + if test "$host_cpu" = ia64; then + # On IA64, the linker does run time linking by default, so we don't + # have to do anything special. + aix_use_runtimelinking=no + else + aix_use_runtimelinking=no + # Test if we are trying to use run time linking or normal + # AIX style linking. If -brtl is somewhere in LDFLAGS, we + # need to do runtime linking. 
+ case $host_os in aix4.[23]|aix4.[23].*|aix5*) + for ld_flag in $LDFLAGS; do + if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then + aix_use_runtimelinking=yes + break + fi + done + ;; + esac + fi + hardcode_direct=yes + hardcode_libdir_separator=':' + if test "$GCC" = yes; then + case $host_os in aix4.[012]|aix4.[012].*) + collect2name=`${CC} -print-prog-name=collect2` + if test -f "$collect2name" && \ + strings "$collect2name" | grep resolve_lib_name >/dev/null + then + # We have reworked collect2 + hardcode_direct=yes + else + # We have old collect2 + hardcode_direct=unsupported + hardcode_minus_L=yes + hardcode_libdir_flag_spec='-L$libdir' + hardcode_libdir_separator= + fi + ;; + esac + fi + # Begin _LT_AC_SYS_LIBPATH_AIX. + echo 'int main () { return 0; }' > conftest.c + ${CC} ${LDFLAGS} conftest.c -o conftest + aix_libpath=`dump -H conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } +}'` + if test -z "$aix_libpath"; then + aix_libpath=`dump -HX64 conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } +}'` + fi + if test -z "$aix_libpath"; then + aix_libpath="/usr/lib:/lib" + fi + rm -f conftest.c conftest + # End _LT_AC_SYS_LIBPATH_AIX. + if test "$aix_use_runtimelinking" = yes; then + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + else + if test "$host_cpu" = ia64; then + hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' + else + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + fi + fi + ;; + amigaos*) + hardcode_libdir_flag_spec='-L$libdir' + hardcode_minus_L=yes + # see comment about different semantics on the GNU ld section + ld_shlibs=no + ;; + bsdi[45]*) + ;; + cygwin* | mingw* | pw32*) + # When not using gcc, we currently assume that we are using + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. 
+ hardcode_libdir_flag_spec=' ' + libext=lib + ;; + darwin* | rhapsody*) + hardcode_direct=no + if test "$GCC" = yes ; then + : + else + case $cc_basename in + xlc*) + ;; + *) + ld_shlibs=no + ;; + esac + fi + ;; + dgux*) + hardcode_libdir_flag_spec='-L$libdir' + ;; + freebsd1*) + ld_shlibs=no + ;; + freebsd2.2*) + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + ;; + freebsd2*) + hardcode_direct=yes + hardcode_minus_L=yes + ;; + freebsd* | kfreebsd*-gnu | dragonfly*) + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + ;; + hpux9*) + hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' + hardcode_libdir_separator=: + hardcode_direct=yes + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. + hardcode_minus_L=yes + ;; + hpux10*) + if test "$with_gnu_ld" = no; then + hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' + hardcode_libdir_separator=: + hardcode_direct=yes + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. + hardcode_minus_L=yes + fi + ;; + hpux11*) + if test "$with_gnu_ld" = no; then + hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' + hardcode_libdir_separator=: + case $host_cpu in + hppa*64*|ia64*) + hardcode_direct=no + ;; + *) + hardcode_direct=yes + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. 
+ hardcode_minus_L=yes + ;; + esac + fi + ;; + irix5* | irix6* | nonstopux*) + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + hardcode_libdir_separator=: + ;; + netbsd*) + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + ;; + newsos6) + hardcode_direct=yes + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + hardcode_libdir_separator=: + ;; + openbsd*) + hardcode_direct=yes + if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + hardcode_libdir_flag_spec='${wl}-rpath,$libdir' + else + case "$host_os" in + openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) + hardcode_libdir_flag_spec='-R$libdir' + ;; + *) + hardcode_libdir_flag_spec='${wl}-rpath,$libdir' + ;; + esac + fi + ;; + os2*) + hardcode_libdir_flag_spec='-L$libdir' + hardcode_minus_L=yes + ;; + osf3*) + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + hardcode_libdir_separator=: + ;; + osf4* | osf5*) + if test "$GCC" = yes; then + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + else + # Both cc and cxx compiler support -rpath directly + hardcode_libdir_flag_spec='-rpath $libdir' + fi + hardcode_libdir_separator=: + ;; + solaris*) + hardcode_libdir_flag_spec='-R$libdir' + ;; + sunos4*) + hardcode_libdir_flag_spec='-L$libdir' + hardcode_direct=yes + hardcode_minus_L=yes + ;; + sysv4) + case $host_vendor in + sni) + hardcode_direct=yes # is this really true??? 
+ ;; + siemens) + hardcode_direct=no + ;; + motorola) + hardcode_direct=no #Motorola manual says yes, but my tests say they lie + ;; + esac + ;; + sysv4.3*) + ;; + sysv4*MP*) + if test -d /usr/nec; then + ld_shlibs=yes + fi + ;; + sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7*) + ;; + sysv5* | sco3.2v5* | sco5v6*) + hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-R,$libdir`' + hardcode_libdir_separator=':' + ;; + uts4*) + hardcode_libdir_flag_spec='-L$libdir' + ;; + *) + ld_shlibs=no + ;; + esac +fi + +# Check dynamic linker characteristics +# Code taken from libtool.m4's AC_LIBTOOL_SYS_DYNAMIC_LINKER. +# Unlike libtool.m4, here we don't care about _all_ names of the library, but +# only about the one the linker finds when passed -lNAME. This is the last +# element of library_names_spec in libtool.m4, or possibly two of them if the +# linker has special search rules. +library_names_spec= # the last element of library_names_spec in libtool.m4 +libname_spec='lib$name' +case "$host_os" in + aix3*) + library_names_spec='$libname.a' + ;; + aix4* | aix5*) + library_names_spec='$libname$shrext' + ;; + amigaos*) + library_names_spec='$libname.a' + ;; + beos*) + library_names_spec='$libname$shrext' + ;; + bsdi[45]*) + library_names_spec='$libname$shrext' + ;; + cygwin* | mingw* | pw32*) + shrext=.dll + library_names_spec='$libname.dll.a $libname.lib' + ;; + darwin* | rhapsody*) + shrext=.dylib + library_names_spec='$libname$shrext' + ;; + dgux*) + library_names_spec='$libname$shrext' + ;; + freebsd1*) + ;; + kfreebsd*-gnu) + library_names_spec='$libname$shrext' + ;; + freebsd* | dragonfly*) + case "$host_os" in + freebsd[123]*) + library_names_spec='$libname$shrext$versuffix' ;; + *) + library_names_spec='$libname$shrext' ;; + esac + ;; + gnu*) + library_names_spec='$libname$shrext' + ;; + hpux9* | hpux10* | hpux11*) + case $host_cpu in + ia64*) + shrext=.so + ;; + hppa*64*) + shrext=.sl + ;; + *) + shrext=.sl + ;; + esac + 
library_names_spec='$libname$shrext' + ;; + interix3*) + library_names_spec='$libname$shrext' + ;; + irix5* | irix6* | nonstopux*) + library_names_spec='$libname$shrext' + case "$host_os" in + irix5* | nonstopux*) + libsuff= shlibsuff= + ;; + *) + case $LD in + *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= ;; + *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 ;; + *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 ;; + *) libsuff= shlibsuff= ;; + esac + ;; + esac + ;; + linux*oldld* | linux*aout* | linux*coff*) + ;; + linux*) + library_names_spec='$libname$shrext' + ;; + knetbsd*-gnu) + library_names_spec='$libname$shrext' + ;; + netbsd*) + library_names_spec='$libname$shrext' + ;; + newsos6) + library_names_spec='$libname$shrext' + ;; + nto-qnx*) + library_names_spec='$libname$shrext' + ;; + openbsd*) + library_names_spec='$libname$shrext$versuffix' + ;; + os2*) + libname_spec='$name' + shrext=.dll + library_names_spec='$libname.a' + ;; + osf3* | osf4* | osf5*) + library_names_spec='$libname$shrext' + ;; + solaris*) + library_names_spec='$libname$shrext' + ;; + sunos4*) + library_names_spec='$libname$shrext$versuffix' + ;; + sysv4 | sysv4.3*) + library_names_spec='$libname$shrext' + ;; + sysv4*MP*) + library_names_spec='$libname$shrext' + ;; + sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) + library_names_spec='$libname$shrext' + ;; + uts4*) + library_names_spec='$libname$shrext' + ;; +esac + +sed_quote_subst='s/\(["`$\\]\)/\\\1/g' +escaped_wl=`echo "X$wl" | sed -e 's/^X//' -e "$sed_quote_subst"` +shlibext=`echo "$shrext" | sed -e 's,^\.,,'` +escaped_libname_spec=`echo "X$libname_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` +escaped_library_names_spec=`echo "X$library_names_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` +escaped_hardcode_libdir_flag_spec=`echo "X$hardcode_libdir_flag_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` + +LC_ALL=C sed -e 
's/^\([a-zA-Z0-9_]*\)=/acl_cv_\1=/' <. +# +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that program. + + +# Please send patches to . Submit a context +# diff and a properly formatted GNU ChangeLog entry. +# +# Configuration subroutine to validate and canonicalize a configuration type. +# Supply the specified configuration type as an argument. +# If it is invalid, we print an error message on stderr and exit with code 1. +# Otherwise, we print the canonical config type on stdout and succeed. + +# You can get the latest version of this script from: +# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD + +# This file is supposed to be the same for all GNU packages +# and recognize all the CPU types, system types and aliases +# that are meaningful with *any* GNU software. +# Each package is responsible for reporting which valid configurations +# it does not support. The user should be able to distinguish +# a failure to support a valid configuration from a meaningless +# configuration. + +# The goal of this file is to map all the various variations of a given +# machine specification into a single specification in the form: +# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM +# or in some cases, the newer four-part form: +# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM +# It is wrong to echo any other type of specification. + +me=`echo "$0" | sed -e 's,.*/,,'` + +usage="\ +Usage: $0 [OPTION] CPU-MFR-OPSYS + $0 [OPTION] ALIAS + +Canonicalize a configuration name. + +Operation modes: + -h, --help print this help, then exit + -t, --time-stamp print date of last modification, then exit + -v, --version print version number, then exit + +Report bugs and patches to ." 
+ +version="\ +GNU config.sub ($timestamp) + +Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, +2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 +Free Software Foundation, Inc. + +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." + +help=" +Try \`$me --help' for more information." + +# Parse command line +while test $# -gt 0 ; do + case $1 in + --time-stamp | --time* | -t ) + echo "$timestamp" ; exit ;; + --version | -v ) + echo "$version" ; exit ;; + --help | --h* | -h ) + echo "$usage"; exit ;; + -- ) # Stop option processing + shift; break ;; + - ) # Use stdin as input. + break ;; + -* ) + echo "$me: invalid option $1$help" + exit 1 ;; + + *local*) + # First pass through any local machine types. + echo $1 + exit ;; + + * ) + break ;; + esac +done + +case $# in + 0) echo "$me: missing argument$help" >&2 + exit 1;; + 1) ;; + *) echo "$me: too many arguments$help" >&2 + exit 1;; +esac + +# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). +# Here we must recognize all the valid KERNEL-OS combinations. +maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` +case $maybe_os in + nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ + linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ + knetbsd*-gnu* | netbsd*-gnu* | \ + kopensolaris*-gnu* | \ + storm-chaos* | os2-emx* | rtmk-nova*) + os=-$maybe_os + basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` + ;; + android-linux) + os=-linux-android + basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown + ;; + *) + basic_machine=`echo $1 | sed 's/-[^-]*$//'` + if [ $basic_machine != $1 ] + then os=`echo $1 | sed 's/.*-/-/'` + else os=; fi + ;; +esac + +### Let's recognize common machines as not being operating systems so +### that things like config.sub decstation-3100 work. 
We also +### recognize some manufacturers as not being operating systems, so we +### can provide default operating systems below. +case $os in + -sun*os*) + # Prevent following clause from handling this invalid input. + ;; + -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ + -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ + -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ + -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ + -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ + -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ + -apple | -axis | -knuth | -cray | -microblaze) + os= + basic_machine=$1 + ;; + -bluegene*) + os=-cnk + ;; + -sim | -cisco | -oki | -wec | -winbond) + os= + basic_machine=$1 + ;; + -scout) + ;; + -wrs) + os=-vxworks + basic_machine=$1 + ;; + -chorusos*) + os=-chorusos + basic_machine=$1 + ;; + -chorusrdb) + os=-chorusrdb + basic_machine=$1 + ;; + -hiux*) + os=-hiuxwe2 + ;; + -sco6) + os=-sco5v6 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco5) + os=-sco3.2v5 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco4) + os=-sco3.2v4 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco3.2.[4-9]*) + os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco3.2v[4-9]*) + # Don't forget version if it is 3.2v4 or newer. + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco5v6*) + # Don't forget version if it is 3.2v4 or newer. 
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco*) + os=-sco3.2v2 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -udk*) + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -isc) + os=-isc2.2 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -clix*) + basic_machine=clipper-intergraph + ;; + -isc*) + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -lynx*) + os=-lynxos + ;; + -ptx*) + basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` + ;; + -windowsnt*) + os=`echo $os | sed -e 's/windowsnt/winnt/'` + ;; + -psos*) + os=-psos + ;; + -mint | -mint[0-9]*) + basic_machine=m68k-atari + os=-mint + ;; +esac + +# Decode aliases for certain CPU-COMPANY combinations. +case $basic_machine in + # Recognize the basic CPU types without company name. + # Some are omitted here because they have special meanings below. + 1750a | 580 \ + | a29k \ + | aarch64 | aarch64_be \ + | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ + | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ + | am33_2.0 \ + | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \ + | be32 | be64 \ + | bfin \ + | c4x | clipper \ + | d10v | d30v | dlx | dsp16xx \ + | epiphany \ + | fido | fr30 | frv \ + | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ + | hexagon \ + | i370 | i860 | i960 | ia64 \ + | ip2k | iq2000 \ + | le32 | le64 \ + | lm32 \ + | m32c | m32r | m32rle | m68000 | m68k | m88k \ + | maxq | mb | microblaze | mcore | mep | metag \ + | mips | mipsbe | mipseb | mipsel | mipsle \ + | mips16 \ + | mips64 | mips64el \ + | mips64octeon | mips64octeonel \ + | mips64orion | mips64orionel \ + | mips64r5900 | mips64r5900el \ + | mips64vr | mips64vrel \ + | mips64vr4100 | mips64vr4100el \ + | mips64vr4300 | mips64vr4300el \ + | mips64vr5000 | mips64vr5000el \ + | mips64vr5900 | mips64vr5900el \ + | mipsisa32 | mipsisa32el \ + | mipsisa32r2 | mipsisa32r2el \ + | mipsisa64 | 
mipsisa64el \ + | mipsisa64r2 | mipsisa64r2el \ + | mipsisa64sb1 | mipsisa64sb1el \ + | mipsisa64sr71k | mipsisa64sr71kel \ + | mipstx39 | mipstx39el \ + | mn10200 | mn10300 \ + | moxie \ + | mt \ + | msp430 \ + | nds32 | nds32le | nds32be \ + | nios | nios2 \ + | ns16k | ns32k \ + | open8 \ + | or32 \ + | pdp10 | pdp11 | pj | pjl \ + | powerpc | powerpc64 | powerpc64le | powerpcle \ + | pyramid \ + | rl78 | rx \ + | score \ + | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ + | sh64 | sh64le \ + | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ + | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ + | spu \ + | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ + | ubicom32 \ + | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ + | we32k \ + | x86 | xc16x | xstormy16 | xtensa \ + | z8k | z80) + basic_machine=$basic_machine-unknown + ;; + c54x) + basic_machine=tic54x-unknown + ;; + c55x) + basic_machine=tic55x-unknown + ;; + c6x) + basic_machine=tic6x-unknown + ;; + m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | picochip) + basic_machine=$basic_machine-unknown + os=-none + ;; + m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) + ;; + ms1) + basic_machine=mt-unknown + ;; + + strongarm | thumb | xscale) + basic_machine=arm-unknown + ;; + xgate) + basic_machine=$basic_machine-unknown + os=-none + ;; + xscaleeb) + basic_machine=armeb-unknown + ;; + + xscaleel) + basic_machine=armel-unknown + ;; + + # We use `pc' rather than `unknown' + # because (1) that's what they normally are, and + # (2) the word "unknown" tends to confuse beginning users. + i*86 | x86_64) + basic_machine=$basic_machine-pc + ;; + # Object if more than one company name word. + *-*-*) + echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 + exit 1 + ;; + # Recognize the basic CPU types with company name. 
+ 580-* \ + | a29k-* \ + | aarch64-* | aarch64_be-* \ + | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ + | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ + | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \ + | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ + | avr-* | avr32-* \ + | be32-* | be64-* \ + | bfin-* | bs2000-* \ + | c[123]* | c30-* | [cjt]90-* | c4x-* \ + | clipper-* | craynv-* | cydra-* \ + | d10v-* | d30v-* | dlx-* \ + | elxsi-* \ + | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ + | h8300-* | h8500-* \ + | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ + | hexagon-* \ + | i*86-* | i860-* | i960-* | ia64-* \ + | ip2k-* | iq2000-* \ + | le32-* | le64-* \ + | lm32-* \ + | m32c-* | m32r-* | m32rle-* \ + | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ + | m88110-* | m88k-* | maxq-* | mcore-* | metag-* | microblaze-* \ + | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ + | mips16-* \ + | mips64-* | mips64el-* \ + | mips64octeon-* | mips64octeonel-* \ + | mips64orion-* | mips64orionel-* \ + | mips64r5900-* | mips64r5900el-* \ + | mips64vr-* | mips64vrel-* \ + | mips64vr4100-* | mips64vr4100el-* \ + | mips64vr4300-* | mips64vr4300el-* \ + | mips64vr5000-* | mips64vr5000el-* \ + | mips64vr5900-* | mips64vr5900el-* \ + | mipsisa32-* | mipsisa32el-* \ + | mipsisa32r2-* | mipsisa32r2el-* \ + | mipsisa64-* | mipsisa64el-* \ + | mipsisa64r2-* | mipsisa64r2el-* \ + | mipsisa64sb1-* | mipsisa64sb1el-* \ + | mipsisa64sr71k-* | mipsisa64sr71kel-* \ + | mipstx39-* | mipstx39el-* \ + | mmix-* \ + | mt-* \ + | msp430-* \ + | nds32-* | nds32le-* | nds32be-* \ + | nios-* | nios2-* \ + | none-* | np1-* | ns16k-* | ns32k-* \ + | open8-* \ + | orion-* \ + | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ + | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ + | pyramid-* \ + | rl78-* | romp-* | rs6000-* | rx-* \ + | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | 
sheb-* | shbe-* \ + | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ + | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ + | sparclite-* \ + | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx?-* \ + | tahoe-* \ + | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ + | tile*-* \ + | tron-* \ + | ubicom32-* \ + | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ + | vax-* \ + | we32k-* \ + | x86-* | x86_64-* | xc16x-* | xps100-* \ + | xstormy16-* | xtensa*-* \ + | ymp-* \ + | z8k-* | z80-*) + ;; + # Recognize the basic CPU types without company name, with glob match. + xtensa*) + basic_machine=$basic_machine-unknown + ;; + # Recognize the various machine names and aliases which stand + # for a CPU type and a company and sometimes even an OS. + 386bsd) + basic_machine=i386-unknown + os=-bsd + ;; + 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) + basic_machine=m68000-att + ;; + 3b*) + basic_machine=we32k-att + ;; + a29khif) + basic_machine=a29k-amd + os=-udi + ;; + abacus) + basic_machine=abacus-unknown + ;; + adobe68k) + basic_machine=m68010-adobe + os=-scout + ;; + alliant | fx80) + basic_machine=fx80-alliant + ;; + altos | altos3068) + basic_machine=m68k-altos + ;; + am29k) + basic_machine=a29k-none + os=-bsd + ;; + amd64) + basic_machine=x86_64-pc + ;; + amd64-*) + basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + amdahl) + basic_machine=580-amdahl + os=-sysv + ;; + amiga | amiga-*) + basic_machine=m68k-unknown + ;; + amigaos | amigados) + basic_machine=m68k-unknown + os=-amigaos + ;; + amigaunix | amix) + basic_machine=m68k-unknown + os=-sysv4 + ;; + apollo68) + basic_machine=m68k-apollo + os=-sysv + ;; + apollo68bsd) + basic_machine=m68k-apollo + os=-bsd + ;; + aros) + basic_machine=i386-pc + os=-aros + ;; + aux) + basic_machine=m68k-apple + os=-aux + ;; + balance) + basic_machine=ns32k-sequent + os=-dynix + ;; + blackfin) + basic_machine=bfin-unknown + 
os=-linux + ;; + blackfin-*) + basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; + bluegene*) + basic_machine=powerpc-ibm + os=-cnk + ;; + c54x-*) + basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c55x-*) + basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c6x-*) + basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c90) + basic_machine=c90-cray + os=-unicos + ;; + cegcc) + basic_machine=arm-unknown + os=-cegcc + ;; + convex-c1) + basic_machine=c1-convex + os=-bsd + ;; + convex-c2) + basic_machine=c2-convex + os=-bsd + ;; + convex-c32) + basic_machine=c32-convex + os=-bsd + ;; + convex-c34) + basic_machine=c34-convex + os=-bsd + ;; + convex-c38) + basic_machine=c38-convex + os=-bsd + ;; + cray | j90) + basic_machine=j90-cray + os=-unicos + ;; + craynv) + basic_machine=craynv-cray + os=-unicosmp + ;; + cr16 | cr16-*) + basic_machine=cr16-unknown + os=-elf + ;; + crds | unos) + basic_machine=m68k-crds + ;; + crisv32 | crisv32-* | etraxfs*) + basic_machine=crisv32-axis + ;; + cris | cris-* | etrax*) + basic_machine=cris-axis + ;; + crx) + basic_machine=crx-unknown + os=-elf + ;; + da30 | da30-*) + basic_machine=m68k-da30 + ;; + decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) + basic_machine=mips-dec + ;; + decsystem10* | dec10*) + basic_machine=pdp10-dec + os=-tops10 + ;; + decsystem20* | dec20*) + basic_machine=pdp10-dec + os=-tops20 + ;; + delta | 3300 | motorola-3300 | motorola-delta \ + | 3300-motorola | delta-motorola) + basic_machine=m68k-motorola + ;; + delta88) + basic_machine=m88k-motorola + os=-sysv3 + ;; + dicos) + basic_machine=i686-pc + os=-dicos + ;; + djgpp) + basic_machine=i586-pc + os=-msdosdjgpp + ;; + dpx20 | dpx20-*) + basic_machine=rs6000-bull + os=-bosx + ;; + dpx2* | dpx2*-bull) + basic_machine=m68k-bull + os=-sysv3 + ;; + ebmon29k) + basic_machine=a29k-amd + os=-ebmon + ;; + elxsi) + basic_machine=elxsi-elxsi + os=-bsd + ;; + encore 
| umax | mmax) + basic_machine=ns32k-encore + ;; + es1800 | OSE68k | ose68k | ose | OSE) + basic_machine=m68k-ericsson + os=-ose + ;; + fx2800) + basic_machine=i860-alliant + ;; + genix) + basic_machine=ns32k-ns + ;; + gmicro) + basic_machine=tron-gmicro + os=-sysv + ;; + go32) + basic_machine=i386-pc + os=-go32 + ;; + h3050r* | hiux*) + basic_machine=hppa1.1-hitachi + os=-hiuxwe2 + ;; + h8300hms) + basic_machine=h8300-hitachi + os=-hms + ;; + h8300xray) + basic_machine=h8300-hitachi + os=-xray + ;; + h8500hms) + basic_machine=h8500-hitachi + os=-hms + ;; + harris) + basic_machine=m88k-harris + os=-sysv3 + ;; + hp300-*) + basic_machine=m68k-hp + ;; + hp300bsd) + basic_machine=m68k-hp + os=-bsd + ;; + hp300hpux) + basic_machine=m68k-hp + os=-hpux + ;; + hp3k9[0-9][0-9] | hp9[0-9][0-9]) + basic_machine=hppa1.0-hp + ;; + hp9k2[0-9][0-9] | hp9k31[0-9]) + basic_machine=m68000-hp + ;; + hp9k3[2-9][0-9]) + basic_machine=m68k-hp + ;; + hp9k6[0-9][0-9] | hp6[0-9][0-9]) + basic_machine=hppa1.0-hp + ;; + hp9k7[0-79][0-9] | hp7[0-79][0-9]) + basic_machine=hppa1.1-hp + ;; + hp9k78[0-9] | hp78[0-9]) + # FIXME: really hppa2.0-hp + basic_machine=hppa1.1-hp + ;; + hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) + # FIXME: really hppa2.0-hp + basic_machine=hppa1.1-hp + ;; + hp9k8[0-9][13679] | hp8[0-9][13679]) + basic_machine=hppa1.1-hp + ;; + hp9k8[0-9][0-9] | hp8[0-9][0-9]) + basic_machine=hppa1.0-hp + ;; + hppa-next) + os=-nextstep3 + ;; + hppaosf) + basic_machine=hppa1.1-hp + os=-osf + ;; + hppro) + basic_machine=hppa1.1-hp + os=-proelf + ;; + i370-ibm* | ibm*) + basic_machine=i370-ibm + ;; + i*86v32) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-sysv32 + ;; + i*86v4*) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-sysv4 + ;; + i*86v) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-sysv + ;; + i*86sol2) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-solaris2 + ;; + i386mach) + 
basic_machine=i386-mach + os=-mach + ;; + i386-vsta | vsta) + basic_machine=i386-unknown + os=-vsta + ;; + iris | iris4d) + basic_machine=mips-sgi + case $os in + -irix*) + ;; + *) + os=-irix4 + ;; + esac + ;; + isi68 | isi) + basic_machine=m68k-isi + os=-sysv + ;; + m68knommu) + basic_machine=m68k-unknown + os=-linux + ;; + m68knommu-*) + basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; + m88k-omron*) + basic_machine=m88k-omron + ;; + magnum | m3230) + basic_machine=mips-mips + os=-sysv + ;; + merlin) + basic_machine=ns32k-utek + os=-sysv + ;; + microblaze) + basic_machine=microblaze-xilinx + ;; + mingw32) + basic_machine=i386-pc + os=-mingw32 + ;; + mingw32ce) + basic_machine=arm-unknown + os=-mingw32ce + ;; + miniframe) + basic_machine=m68000-convergent + ;; + *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) + basic_machine=m68k-atari + os=-mint + ;; + mips3*-*) + basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` + ;; + mips3*) + basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown + ;; + monitor) + basic_machine=m68k-rom68k + os=-coff + ;; + morphos) + basic_machine=powerpc-unknown + os=-morphos + ;; + msdos) + basic_machine=i386-pc + os=-msdos + ;; + ms1-*) + basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` + ;; + msys) + basic_machine=i386-pc + os=-msys + ;; + mvs) + basic_machine=i370-ibm + os=-mvs + ;; + nacl) + basic_machine=le32-unknown + os=-nacl + ;; + ncr3000) + basic_machine=i486-ncr + os=-sysv4 + ;; + netbsd386) + basic_machine=i386-unknown + os=-netbsd + ;; + netwinder) + basic_machine=armv4l-rebel + os=-linux + ;; + news | news700 | news800 | news900) + basic_machine=m68k-sony + os=-newsos + ;; + news1000) + basic_machine=m68030-sony + os=-newsos + ;; + news-3600 | risc-news) + basic_machine=mips-sony + os=-newsos + ;; + necv70) + basic_machine=v70-nec + os=-sysv + ;; + next | m*-next ) + basic_machine=m68k-next + case $os in + -nextstep* ) + ;; + -ns2*) + os=-nextstep2 + ;; + *) + 
os=-nextstep3 + ;; + esac + ;; + nh3000) + basic_machine=m68k-harris + os=-cxux + ;; + nh[45]000) + basic_machine=m88k-harris + os=-cxux + ;; + nindy960) + basic_machine=i960-intel + os=-nindy + ;; + mon960) + basic_machine=i960-intel + os=-mon960 + ;; + nonstopux) + basic_machine=mips-compaq + os=-nonstopux + ;; + np1) + basic_machine=np1-gould + ;; + neo-tandem) + basic_machine=neo-tandem + ;; + nse-tandem) + basic_machine=nse-tandem + ;; + nsr-tandem) + basic_machine=nsr-tandem + ;; + op50n-* | op60c-*) + basic_machine=hppa1.1-oki + os=-proelf + ;; + openrisc | openrisc-*) + basic_machine=or32-unknown + ;; + os400) + basic_machine=powerpc-ibm + os=-os400 + ;; + OSE68000 | ose68000) + basic_machine=m68000-ericsson + os=-ose + ;; + os68k) + basic_machine=m68k-none + os=-os68k + ;; + pa-hitachi) + basic_machine=hppa1.1-hitachi + os=-hiuxwe2 + ;; + paragon) + basic_machine=i860-intel + os=-osf + ;; + parisc) + basic_machine=hppa-unknown + os=-linux + ;; + parisc-*) + basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; + pbd) + basic_machine=sparc-tti + ;; + pbb) + basic_machine=m68k-tti + ;; + pc532 | pc532-*) + basic_machine=ns32k-pc532 + ;; + pc98) + basic_machine=i386-pc + ;; + pc98-*) + basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentium | p5 | k5 | k6 | nexgen | viac3) + basic_machine=i586-pc + ;; + pentiumpro | p6 | 6x86 | athlon | athlon_*) + basic_machine=i686-pc + ;; + pentiumii | pentium2 | pentiumiii | pentium3) + basic_machine=i686-pc + ;; + pentium4) + basic_machine=i786-pc + ;; + pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) + basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentiumpro-* | p6-* | 6x86-* | athlon-*) + basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) + basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentium4-*) + basic_machine=i786-`echo $basic_machine | sed 
's/^[^-]*-//'` + ;; + pn) + basic_machine=pn-gould + ;; + power) basic_machine=power-ibm + ;; + ppc | ppcbe) basic_machine=powerpc-unknown + ;; + ppc-* | ppcbe-*) + basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ppcle | powerpclittle | ppc-le | powerpc-little) + basic_machine=powerpcle-unknown + ;; + ppcle-* | powerpclittle-*) + basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ppc64) basic_machine=powerpc64-unknown + ;; + ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ppc64le | powerpc64little | ppc64-le | powerpc64-little) + basic_machine=powerpc64le-unknown + ;; + ppc64le-* | powerpc64little-*) + basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ps2) + basic_machine=i386-ibm + ;; + pw32) + basic_machine=i586-unknown + os=-pw32 + ;; + rdos) + basic_machine=i386-pc + os=-rdos + ;; + rom68k) + basic_machine=m68k-rom68k + os=-coff + ;; + rm[46]00) + basic_machine=mips-siemens + ;; + rtpc | rtpc-*) + basic_machine=romp-ibm + ;; + s390 | s390-*) + basic_machine=s390-ibm + ;; + s390x | s390x-*) + basic_machine=s390x-ibm + ;; + sa29200) + basic_machine=a29k-amd + os=-udi + ;; + sb1) + basic_machine=mipsisa64sb1-unknown + ;; + sb1el) + basic_machine=mipsisa64sb1el-unknown + ;; + sde) + basic_machine=mipsisa32-sde + os=-elf + ;; + sei) + basic_machine=mips-sei + os=-seiux + ;; + sequent) + basic_machine=i386-sequent + ;; + sh) + basic_machine=sh-hitachi + os=-hms + ;; + sh5el) + basic_machine=sh5le-unknown + ;; + sh64) + basic_machine=sh64-unknown + ;; + sparclite-wrs | simso-wrs) + basic_machine=sparclite-wrs + os=-vxworks + ;; + sps7) + basic_machine=m68k-bull + os=-sysv2 + ;; + spur) + basic_machine=spur-unknown + ;; + st2000) + basic_machine=m68k-tandem + ;; + stratus) + basic_machine=i860-stratus + os=-sysv4 + ;; + strongarm-* | thumb-*) + basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + sun2) + basic_machine=m68000-sun + ;; + sun2os3) + 
basic_machine=m68000-sun + os=-sunos3 + ;; + sun2os4) + basic_machine=m68000-sun + os=-sunos4 + ;; + sun3os3) + basic_machine=m68k-sun + os=-sunos3 + ;; + sun3os4) + basic_machine=m68k-sun + os=-sunos4 + ;; + sun4os3) + basic_machine=sparc-sun + os=-sunos3 + ;; + sun4os4) + basic_machine=sparc-sun + os=-sunos4 + ;; + sun4sol2) + basic_machine=sparc-sun + os=-solaris2 + ;; + sun3 | sun3-*) + basic_machine=m68k-sun + ;; + sun4) + basic_machine=sparc-sun + ;; + sun386 | sun386i | roadrunner) + basic_machine=i386-sun + ;; + sv1) + basic_machine=sv1-cray + os=-unicos + ;; + symmetry) + basic_machine=i386-sequent + os=-dynix + ;; + t3e) + basic_machine=alphaev5-cray + os=-unicos + ;; + t90) + basic_machine=t90-cray + os=-unicos + ;; + tile*) + basic_machine=$basic_machine-unknown + os=-linux-gnu + ;; + tx39) + basic_machine=mipstx39-unknown + ;; + tx39el) + basic_machine=mipstx39el-unknown + ;; + toad1) + basic_machine=pdp10-xkl + os=-tops20 + ;; + tower | tower-32) + basic_machine=m68k-ncr + ;; + tpf) + basic_machine=s390x-ibm + os=-tpf + ;; + udi29k) + basic_machine=a29k-amd + os=-udi + ;; + ultra3) + basic_machine=a29k-nyu + os=-sym1 + ;; + v810 | necv810) + basic_machine=v810-nec + os=-none + ;; + vaxv) + basic_machine=vax-dec + os=-sysv + ;; + vms) + basic_machine=vax-dec + os=-vms + ;; + vpp*|vx|vx-*) + basic_machine=f301-fujitsu + ;; + vxworks960) + basic_machine=i960-wrs + os=-vxworks + ;; + vxworks68) + basic_machine=m68k-wrs + os=-vxworks + ;; + vxworks29k) + basic_machine=a29k-wrs + os=-vxworks + ;; + w65*) + basic_machine=w65-wdc + os=-none + ;; + w89k-*) + basic_machine=hppa1.1-winbond + os=-proelf + ;; + xbox) + basic_machine=i686-pc + os=-mingw32 + ;; + xps | xps100) + basic_machine=xps100-honeywell + ;; + xscale-* | xscalee[bl]-*) + basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'` + ;; + ymp) + basic_machine=ymp-cray + os=-unicos + ;; + z8k-*-coff) + basic_machine=z8k-unknown + os=-sim + ;; + z80-*-coff) + basic_machine=z80-unknown + os=-sim + 
;; + none) + basic_machine=none-none + os=-none + ;; + +# Here we handle the default manufacturer of certain CPU types. It is in +# some cases the only manufacturer, in others, it is the most popular. + w89k) + basic_machine=hppa1.1-winbond + ;; + op50n) + basic_machine=hppa1.1-oki + ;; + op60c) + basic_machine=hppa1.1-oki + ;; + romp) + basic_machine=romp-ibm + ;; + mmix) + basic_machine=mmix-knuth + ;; + rs6000) + basic_machine=rs6000-ibm + ;; + vax) + basic_machine=vax-dec + ;; + pdp10) + # there are many clones, so DEC is not a safe bet + basic_machine=pdp10-unknown + ;; + pdp11) + basic_machine=pdp11-dec + ;; + we32k) + basic_machine=we32k-att + ;; + sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) + basic_machine=sh-unknown + ;; + sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) + basic_machine=sparc-sun + ;; + cydra) + basic_machine=cydra-cydrome + ;; + orion) + basic_machine=orion-highlevel + ;; + orion105) + basic_machine=clipper-highlevel + ;; + mac | mpw | mac-mpw) + basic_machine=m68k-apple + ;; + pmac | pmac-mpw) + basic_machine=powerpc-apple + ;; + *-unknown) + # Make sure to match an already-canonicalized machine name. + ;; + *) + echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 + exit 1 + ;; +esac + +# Here we canonicalize certain aliases for manufacturers. +case $basic_machine in + *-digital*) + basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` + ;; + *-commodore*) + basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` + ;; + *) + ;; +esac + +# Decode manufacturer-specific aliases for certain operating systems. + +if [ x"$os" != x"" ] +then +case $os in + # First match some system type aliases + # that might get confused with valid system types. + # -solaris* is a basic system type, with this one exception. 
+ -auroraux) + os=-auroraux + ;; + -solaris1 | -solaris1.*) + os=`echo $os | sed -e 's|solaris1|sunos4|'` + ;; + -solaris) + os=-solaris2 + ;; + -svr4*) + os=-sysv4 + ;; + -unixware*) + os=-sysv4.2uw + ;; + -gnu/linux*) + os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` + ;; + # First accept the basic system types. + # The portable systems comes first. + # Each alternative MUST END IN A *, to match a version number. + # -sysv* is not here because it comes later, after sysvr4. + -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ + | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ + | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ + | -sym* | -kopensolaris* \ + | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ + | -aos* | -aros* \ + | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ + | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ + | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ + | -openbsd* | -solidbsd* \ + | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ + | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ + | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ + | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ + | -chorusos* | -chorusrdb* | -cegcc* \ + | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ + | -mingw32* | -linux-gnu* | -linux-android* \ + | -linux-newlib* | -linux-uclibc* \ + | -uxpv* | -beos* | -mpeix* | -udk* \ + | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ + | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ + | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ + | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ + | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ + | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ + | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*) + # Remember, each alternative MUST END IN *, to match a version 
number. + ;; + -qnx*) + case $basic_machine in + x86-* | i*86-*) + ;; + *) + os=-nto$os + ;; + esac + ;; + -nto-qnx*) + ;; + -nto*) + os=`echo $os | sed -e 's|nto|nto-qnx|'` + ;; + -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ + | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ + | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) + ;; + -mac*) + os=`echo $os | sed -e 's|mac|macos|'` + ;; + -linux-dietlibc) + os=-linux-dietlibc + ;; + -linux*) + os=`echo $os | sed -e 's|linux|linux-gnu|'` + ;; + -sunos5*) + os=`echo $os | sed -e 's|sunos5|solaris2|'` + ;; + -sunos6*) + os=`echo $os | sed -e 's|sunos6|solaris3|'` + ;; + -opened*) + os=-openedition + ;; + -os400*) + os=-os400 + ;; + -wince*) + os=-wince + ;; + -osfrose*) + os=-osfrose + ;; + -osf*) + os=-osf + ;; + -utek*) + os=-bsd + ;; + -dynix*) + os=-bsd + ;; + -acis*) + os=-aos + ;; + -atheos*) + os=-atheos + ;; + -syllable*) + os=-syllable + ;; + -386bsd) + os=-bsd + ;; + -ctix* | -uts*) + os=-sysv + ;; + -nova*) + os=-rtmk-nova + ;; + -ns2 ) + os=-nextstep2 + ;; + -nsk*) + os=-nsk + ;; + # Preserve the version number of sinix5. + -sinix5.*) + os=`echo $os | sed -e 's|sinix|sysv|'` + ;; + -sinix*) + os=-sysv4 + ;; + -tpf*) + os=-tpf + ;; + -triton*) + os=-sysv3 + ;; + -oss*) + os=-sysv3 + ;; + -svr4) + os=-sysv4 + ;; + -svr3) + os=-sysv3 + ;; + -sysvr4) + os=-sysv4 + ;; + # This must come after -sysvr4. + -sysv*) + ;; + -ose*) + os=-ose + ;; + -es1800*) + os=-ose + ;; + -xenix) + os=-xenix + ;; + -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) + os=-mint + ;; + -aros*) + os=-aros + ;; + -kaos*) + os=-kaos + ;; + -zvmoe) + os=-zvmoe + ;; + -dicos*) + os=-dicos + ;; + -nacl*) + ;; + -none) + ;; + *) + # Get rid of the `-' at the beginning of $os. + os=`echo $os | sed 's/[^-]*-//'` + echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 + exit 1 + ;; +esac +else + +# Here we handle the default operating systems that come with various machines. 
+# The value should be what the vendor currently ships out the door with their +# machine or put another way, the most popular os provided with the machine. + +# Note that if you're going to try to match "-MANUFACTURER" here (say, +# "-sun"), then you have to tell the case statement up towards the top +# that MANUFACTURER isn't an operating system. Otherwise, code above +# will signal an error saying that MANUFACTURER isn't an operating +# system, and we'll never get to this point. + +case $basic_machine in + score-*) + os=-elf + ;; + spu-*) + os=-elf + ;; + *-acorn) + os=-riscix1.2 + ;; + arm*-rebel) + os=-linux + ;; + arm*-semi) + os=-aout + ;; + c4x-* | tic4x-*) + os=-coff + ;; + tic54x-*) + os=-coff + ;; + tic55x-*) + os=-coff + ;; + tic6x-*) + os=-coff + ;; + # This must come before the *-dec entry. + pdp10-*) + os=-tops20 + ;; + pdp11-*) + os=-none + ;; + *-dec | vax-*) + os=-ultrix4.2 + ;; + m68*-apollo) + os=-domain + ;; + i386-sun) + os=-sunos4.0.2 + ;; + m68000-sun) + os=-sunos3 + ;; + m68*-cisco) + os=-aout + ;; + mep-*) + os=-elf + ;; + mips*-cisco) + os=-elf + ;; + mips*-*) + os=-elf + ;; + or32-*) + os=-coff + ;; + *-tti) # must be before sparc entry or we get the wrong os. 
+ os=-sysv3 + ;; + sparc-* | *-sun) + os=-sunos4.1.1 + ;; + *-be) + os=-beos + ;; + *-haiku) + os=-haiku + ;; + *-ibm) + os=-aix + ;; + *-knuth) + os=-mmixware + ;; + *-wec) + os=-proelf + ;; + *-winbond) + os=-proelf + ;; + *-oki) + os=-proelf + ;; + *-hp) + os=-hpux + ;; + *-hitachi) + os=-hiux + ;; + i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) + os=-sysv + ;; + *-cbm) + os=-amigaos + ;; + *-dg) + os=-dgux + ;; + *-dolphin) + os=-sysv3 + ;; + m68k-ccur) + os=-rtu + ;; + m88k-omron*) + os=-luna + ;; + *-next ) + os=-nextstep + ;; + *-sequent) + os=-ptx + ;; + *-crds) + os=-unos + ;; + *-ns) + os=-genix + ;; + i370-*) + os=-mvs + ;; + *-next) + os=-nextstep3 + ;; + *-gould) + os=-sysv + ;; + *-highlevel) + os=-bsd + ;; + *-encore) + os=-bsd + ;; + *-sgi) + os=-irix + ;; + *-siemens) + os=-sysv4 + ;; + *-masscomp) + os=-rtu + ;; + f30[01]-fujitsu | f700-fujitsu) + os=-uxpv + ;; + *-rom68k) + os=-coff + ;; + *-*bug) + os=-coff + ;; + *-apple) + os=-macos + ;; + *-atari*) + os=-mint + ;; + *) + os=-none + ;; +esac +fi + +# Here we handle the case where we know the os, and the CPU type, but not the +# manufacturer. We pick the logical manufacturer. 
+vendor=unknown +case $basic_machine in + *-unknown) + case $os in + -riscix*) + vendor=acorn + ;; + -sunos*) + vendor=sun + ;; + -cnk*|-aix*) + vendor=ibm + ;; + -beos*) + vendor=be + ;; + -hpux*) + vendor=hp + ;; + -mpeix*) + vendor=hp + ;; + -hiux*) + vendor=hitachi + ;; + -unos*) + vendor=crds + ;; + -dgux*) + vendor=dg + ;; + -luna*) + vendor=omron + ;; + -genix*) + vendor=ns + ;; + -mvs* | -opened*) + vendor=ibm + ;; + -os400*) + vendor=ibm + ;; + -ptx*) + vendor=sequent + ;; + -tpf*) + vendor=ibm + ;; + -vxsim* | -vxworks* | -windiss*) + vendor=wrs + ;; + -aux*) + vendor=apple + ;; + -hms*) + vendor=hitachi + ;; + -mpw* | -macos*) + vendor=apple + ;; + -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) + vendor=atari + ;; + -vos*) + vendor=stratus + ;; + esac + basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` + ;; +esac + +echo $basic_machine$os +exit + +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "timestamp='" +# time-stamp-format: "%:y-%02m-%02d" +# time-stamp-end: "'" +# End: diff --git a/vendor/libssh2/configure b/vendor/libssh2/configure new file mode 100755 index 000000000..8d323ac1a --- /dev/null +++ b/vendor/libssh2/configure @@ -0,0 +1,19935 @@ +#! /bin/sh +# Guess values for system-dependent variables and create Makefiles. +# Generated by GNU Autoconf 2.69 for libssh2 -. +# +# Report bugs to . +# +# +# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. +# +# +# This configure script is free software; the Free Software Foundation +# gives unlimited permission to copy, distribute and modify it. +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. 
+ alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. +as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. +if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi + +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# IFS +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + +# Find who we are. 
Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +# Use a proper internal environment variable to ensure we don't fall + # into an infinite loop, continuously re-executing ourselves. + if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then + _as_can_reexec=no; export _as_can_reexec; + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. 
+BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +as_fn_exit 255 + fi + # We don't want this to propagate to other subprocesses. + { _as_can_reexec=; unset _as_can_reexec;} +if test "x$CONFIG_SHELL" = x; then + as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which + # is contrary to our usage. Disable this feature. + alias -g '\${1+\"\$@\"}'='\"\$@\"' + setopt NO_GLOB_SUBST +else + case \`(set -o) 2>/dev/null\` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi +" + as_required="as_fn_return () { (exit \$1); } +as_fn_success () { as_fn_return 0; } +as_fn_failure () { as_fn_return 1; } +as_fn_ret_success () { return 0; } +as_fn_ret_failure () { return 1; } + +exitcode=0 +as_fn_success || { exitcode=1; echo as_fn_success failed.; } +as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } +as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } +as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } +if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : + +else + exitcode=1; echo positional parameters were not saved. 
+fi +test x\$exitcode = x0 || exit 1 +test -x / || exit 1" + as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO + as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO + eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && + test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 +test \$(( 1 + 1 )) = 2 || exit 1 + + test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || ( + ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' + ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO + ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO + PATH=/empty FPATH=/empty; export PATH FPATH + test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\ + || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1" + if (eval "$as_required") 2>/dev/null; then : + as_have_required=yes +else + as_have_required=no +fi + if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : + +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_found=false +for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + as_found=: + case $as_dir in #( + /*) + for as_base in sh bash ksh sh5; do + # Try only shells that exist, to save several forks. 
+ as_shell=$as_dir/$as_base + if { test -f "$as_shell" || test -f "$as_shell.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : + CONFIG_SHELL=$as_shell as_have_required=yes + if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : + break 2 +fi +fi + done;; + esac + as_found=false +done +$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : + CONFIG_SHELL=$SHELL as_have_required=yes +fi; } +IFS=$as_save_IFS + + + if test "x$CONFIG_SHELL" != x; then : + export CONFIG_SHELL + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 +fi + + if test x$as_have_required = xno; then : + $as_echo "$0: This script requires a shell more modern than all" + $as_echo "$0: the shells that I found on your system." + if test x${ZSH_VERSION+set} = xset ; then + $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" + $as_echo "$0: be upgraded to zsh 4.3.4 or later." + else + $as_echo "$0: Please tell bug-autoconf@gnu.org and +$0: libssh2-devel@cool.haxx.se about your system, including +$0: any error possibly output before this message. Then +$0: install a modern shell, or manually run the script +$0: under such a shell if you do have one." 
+ fi + exit 1 +fi +fi +fi +SHELL=${CONFIG_SHELL-/bin/sh} +export SHELL +# Unset more variables known to interfere with behavior of common tools. +CLICOLOR_FORCE= GREP_OPTIONS= +unset CLICOLOR_FORCE GREP_OPTIONS + +## --------------------- ## +## M4sh Shell Functions. ## +## --------------------- ## +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. 
+as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. 
+as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + $as_echo "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + + as_lineno_1=$LINENO as_lineno_1a=$LINENO + as_lineno_2=$LINENO as_lineno_2a=$LINENO + eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && + test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { + # Blame Lee E. McMahon (1931-1989) for sed's syntax. 
:-) + sed -n ' + p + /[$]LINENO/= + ' <$as_myself | + sed ' + s/[$]LINENO.*/&-/ + t lineno + b + :lineno + N + :loop + s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ + t loop + s/-\n.*// + ' >$as_me.lineno && + chmod +x "$as_me.lineno" || + { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec + # Don't try to exec as it changes $[0], causing all sort of problems + # (the dirname of $[0] is not the place where we might find the + # original and so on. Autoconf is especially sensitive to this). + . "./$as_me.lineno" + # Exit status is that of the last command. + exit +} + +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + +if mkdir -p . 
2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + +SHELL=${CONFIG_SHELL-/bin/sh} + + +test -n "$DJDIR" || exec 7<&0 &1 + +# Name of the host. +# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, +# so uname gets run too. +ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` + +# +# Initializations. +# +ac_default_prefix=/usr/local +ac_clean_files= +ac_config_libobj_dir=. +LIBOBJS= +cross_compiling=no +subdirs= +MFLAGS= +MAKEFLAGS= + +# Identity of this package. +PACKAGE_NAME='libssh2' +PACKAGE_TARNAME='libssh2' +PACKAGE_VERSION='-' +PACKAGE_STRING='libssh2 -' +PACKAGE_BUGREPORT='libssh2-devel@cool.haxx.se' +PACKAGE_URL='' + +ac_unique_file="src" +# Factoring default headers for most tests. 
+ac_includes_default="\ +#include +#ifdef HAVE_SYS_TYPES_H +# include +#endif +#ifdef HAVE_SYS_STAT_H +# include +#endif +#ifdef STDC_HEADERS +# include +# include +#else +# ifdef HAVE_STDLIB_H +# include +# endif +#endif +#ifdef HAVE_STRING_H +# if !defined STDC_HEADERS && defined HAVE_MEMORY_H +# include +# endif +# include +#endif +#ifdef HAVE_STRINGS_H +# include +#endif +#ifdef HAVE_INTTYPES_H +# include +#endif +#ifdef HAVE_STDINT_H +# include +#endif +#ifdef HAVE_UNISTD_H +# include +#endif" + +ac_subst_vars='am__EXEEXT_FALSE +am__EXEEXT_TRUE +LTLIBOBJS +LIBOBJS +ALLOCA +HAVE_SYS_UN_H_FALSE +HAVE_SYS_UN_H_TRUE +BUILD_EXAMPLES_FALSE +BUILD_EXAMPLES_TRUE +LIBSREQUIRED +LIBZ_PREFIX +LTLIBZ +LIBZ +HAVE_LIBZ +OS400QC3_FALSE +OS400QC3_TRUE +WINCNG_FALSE +WINCNG_TRUE +LIBCRYPT32_PREFIX +LTLIBCRYPT32 +LIBCRYPT32 +HAVE_LIBCRYPT32 +LIBBCRYPT_PREFIX +LTLIBBCRYPT +LIBBCRYPT +HAVE_LIBBCRYPT +LIBGCRYPT_FALSE +LIBGCRYPT_TRUE +LIBGCRYPT_PREFIX +LTLIBGCRYPT +LIBGCRYPT +HAVE_LIBGCRYPT +OPENSSL_FALSE +OPENSSL_TRUE +LIBSSL_PREFIX +LTLIBSSL +LIBSSL +HAVE_LIBSSL +OTOOL64 +OTOOL +LIPO +NMEDIT +DSYMUTIL +MANIFEST_TOOL +RANLIB +ac_ct_AR +AR +NM +ac_ct_DUMPBIN +DUMPBIN +LD +FGREP +LIBTOOL +OBJDUMP +DLLTOOL +AS +SSHD_FALSE +SSHD_TRUE +SSHD +LN_S +EGREP +GREP +CPP +am__fastdepCC_FALSE +am__fastdepCC_TRUE +CCDEPMODE +am__nodep +AMDEPBACKSLASH +AMDEP_FALSE +AMDEP_TRUE +am__quote +am__include +DEPDIR +OBJEXT +EXEEXT +ac_ct_CC +CPPFLAGS +LDFLAGS +CFLAGS +CC +host_os +host_vendor +host_cpu +host +build_os +build_vendor +build_cpu +build +LIBSSH2VER +am__untar +am__tar +AMTAR +am__leading_dot +SET_MAKE +AWK +mkdir_p +MKDIR_P +INSTALL_STRIP_PROGRAM +STRIP +install_sh +MAKEINFO +AUTOHEADER +AUTOMAKE +AUTOCONF +ACLOCAL +VERSION +PACKAGE +CYGPATH_W +am__isrc +INSTALL_DATA +INSTALL_SCRIPT +INSTALL_PROGRAM +SED +AM_BACKSLASH +AM_DEFAULT_VERBOSITY +AM_DEFAULT_V +AM_V +MAINT +MAINTAINER_MODE_FALSE +MAINTAINER_MODE_TRUE +target_alias +host_alias +build_alias +LIBS +ECHO_T +ECHO_N +ECHO_C +DEFS 
+mandir +localedir +libdir +psdir +pdfdir +dvidir +htmldir +infodir +docdir +oldincludedir +includedir +runstatedir +localstatedir +sharedstatedir +sysconfdir +datadir +datarootdir +libexecdir +sbindir +bindir +program_transform_name +prefix +exec_prefix +PACKAGE_URL +PACKAGE_BUGREPORT +PACKAGE_STRING +PACKAGE_VERSION +PACKAGE_TARNAME +PACKAGE_NAME +PATH_SEPARATOR +SHELL' +ac_subst_files='' +ac_user_opts=' +enable_option_checking +enable_maintainer_mode +enable_silent_rules +enable_dependency_tracking +enable_shared +enable_static +with_pic +enable_fast_install +with_gnu_ld +with_sysroot +enable_libtool_lock +enable_largefile +with_openssl +with_libgcrypt +with_wincng +with_libz +enable_rpath +with_libssl_prefix +with_libgcrypt_prefix +with_libbcrypt_prefix +with_libcrypt32_prefix +with_libz_prefix +enable_crypt_none +enable_mac_none +enable_gex_new +enable_clear_memory +enable_debug +enable_hidden_symbols +enable_examples_build +' + ac_precious_vars='build_alias +host_alias +target_alias +CC +CFLAGS +LDFLAGS +LIBS +CPPFLAGS +CPP' + + +# Initialize some variables set by options. +ac_init_help= +ac_init_version=false +ac_unrecognized_opts= +ac_unrecognized_sep= +# The variables have the same names as the options, with +# dashes changed to underlines. +cache_file=/dev/null +exec_prefix=NONE +no_create= +no_recursion= +prefix=NONE +program_prefix=NONE +program_suffix=NONE +program_transform_name=s,x,x, +silent= +site= +srcdir= +verbose= +x_includes=NONE +x_libraries=NONE + +# Installation directory options. +# These are left unexpanded so users can "make install exec_prefix=/foo" +# and all the variables that are supposed to be based on exec_prefix +# by default will actually change. +# Use braces instead of parens because sh, perl, etc. also accept them. +# (The list follows the same order as the GNU Coding Standards.) 
+bindir='${exec_prefix}/bin' +sbindir='${exec_prefix}/sbin' +libexecdir='${exec_prefix}/libexec' +datarootdir='${prefix}/share' +datadir='${datarootdir}' +sysconfdir='${prefix}/etc' +sharedstatedir='${prefix}/com' +localstatedir='${prefix}/var' +runstatedir='${localstatedir}/run' +includedir='${prefix}/include' +oldincludedir='/usr/include' +docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' +infodir='${datarootdir}/info' +htmldir='${docdir}' +dvidir='${docdir}' +pdfdir='${docdir}' +psdir='${docdir}' +libdir='${exec_prefix}/lib' +localedir='${datarootdir}/locale' +mandir='${datarootdir}/man' + +ac_prev= +ac_dashdash= +for ac_option +do + # If the previous option needs an argument, assign it. + if test -n "$ac_prev"; then + eval $ac_prev=\$ac_option + ac_prev= + continue + fi + + case $ac_option in + *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; + *=) ac_optarg= ;; + *) ac_optarg=yes ;; + esac + + # Accept the important Cygnus configure options, so we can diagnose typos. + + case $ac_dashdash$ac_option in + --) + ac_dashdash=yes ;; + + -bindir | --bindir | --bindi | --bind | --bin | --bi) + ac_prev=bindir ;; + -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) + bindir=$ac_optarg ;; + + -build | --build | --buil | --bui | --bu) + ac_prev=build_alias ;; + -build=* | --build=* | --buil=* | --bui=* | --bu=*) + build_alias=$ac_optarg ;; + + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + cache_file=$ac_optarg ;; + + --config-cache | -C) + cache_file=config.cache ;; + + -datadir | --datadir | --datadi | --datad) + ac_prev=datadir ;; + -datadir=* | --datadir=* | --datadi=* | --datad=*) + datadir=$ac_optarg ;; + + -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ + | --dataroo | --dataro 
| --datar) + ac_prev=datarootdir ;; + -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ + | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) + datarootdir=$ac_optarg ;; + + -disable-* | --disable-*) + ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=no ;; + + -docdir | --docdir | --docdi | --doc | --do) + ac_prev=docdir ;; + -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) + docdir=$ac_optarg ;; + + -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) + ac_prev=dvidir ;; + -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) + dvidir=$ac_optarg ;; + + -enable-* | --enable-*) + ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=\$ac_optarg ;; + + -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ + | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ + | --exec | --exe | --ex) + ac_prev=exec_prefix ;; + -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ + | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ + | --exec=* | --exe=* | --ex=*) + exec_prefix=$ac_optarg ;; + + -gas | --gas | --ga | --g) + # Obsolete; use --with-gas. + with_gas=yes ;; + + -help | --help | --hel | --he | -h) + ac_init_help=long ;; + -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) + ac_init_help=recursive ;; + -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) + ac_init_help=short ;; + + -host | --host | --hos | --ho) + ac_prev=host_alias ;; + -host=* | --host=* | --hos=* | --ho=*) + host_alias=$ac_optarg ;; + + -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) + ac_prev=htmldir ;; + -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ + | --ht=*) + htmldir=$ac_optarg ;; + + -includedir | --includedir | --includedi | --included | --include \ + | --includ | --inclu | --incl | --inc) + ac_prev=includedir ;; + -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ + | --includ=* | --inclu=* | --incl=* | --inc=*) + includedir=$ac_optarg ;; + + -infodir | --infodir | --infodi | --infod | --info | --inf) + ac_prev=infodir ;; + -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) + infodir=$ac_optarg ;; + + -libdir | --libdir | --libdi | --libd) + ac_prev=libdir ;; + -libdir=* | --libdir=* | --libdi=* | --libd=*) + libdir=$ac_optarg ;; + + -libexecdir 
| --libexecdir | --libexecdi | --libexecd | --libexec \ + | --libexe | --libex | --libe) + ac_prev=libexecdir ;; + -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ + | --libexe=* | --libex=* | --libe=*) + libexecdir=$ac_optarg ;; + + -localedir | --localedir | --localedi | --localed | --locale) + ac_prev=localedir ;; + -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) + localedir=$ac_optarg ;; + + -localstatedir | --localstatedir | --localstatedi | --localstated \ + | --localstate | --localstat | --localsta | --localst | --locals) + ac_prev=localstatedir ;; + -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ + | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) + localstatedir=$ac_optarg ;; + + -mandir | --mandir | --mandi | --mand | --man | --ma | --m) + ac_prev=mandir ;; + -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) + mandir=$ac_optarg ;; + + -nfp | --nfp | --nf) + # Obsolete; use --without-fp. 
+ with_fp=no ;; + + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c | -n) + no_create=yes ;; + + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) + no_recursion=yes ;; + + -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ + | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ + | --oldin | --oldi | --old | --ol | --o) + ac_prev=oldincludedir ;; + -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ + | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ + | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) + oldincludedir=$ac_optarg ;; + + -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) + ac_prev=prefix ;; + -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) + prefix=$ac_optarg ;; + + -program-prefix | --program-prefix | --program-prefi | --program-pref \ + | --program-pre | --program-pr | --program-p) + ac_prev=program_prefix ;; + -program-prefix=* | --program-prefix=* | --program-prefi=* \ + | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) + program_prefix=$ac_optarg ;; + + -program-suffix | --program-suffix | --program-suffi | --program-suff \ + | --program-suf | --program-su | --program-s) + ac_prev=program_suffix ;; + -program-suffix=* | --program-suffix=* | --program-suffi=* \ + | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) + program_suffix=$ac_optarg ;; + + -program-transform-name | --program-transform-name \ + | --program-transform-nam | --program-transform-na \ + | --program-transform-n | --program-transform- \ + | --program-transform | --program-transfor \ + | --program-transfo | --program-transf \ + | --program-trans | --program-tran \ + | --progr-tra | --program-tr | --program-t) + ac_prev=program_transform_name ;; + -program-transform-name=* | --program-transform-name=* \ + | 
--program-transform-nam=* | --program-transform-na=* \ + | --program-transform-n=* | --program-transform-=* \ + | --program-transform=* | --program-transfor=* \ + | --program-transfo=* | --program-transf=* \ + | --program-trans=* | --program-tran=* \ + | --progr-tra=* | --program-tr=* | --program-t=*) + program_transform_name=$ac_optarg ;; + + -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) + ac_prev=pdfdir ;; + -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) + pdfdir=$ac_optarg ;; + + -psdir | --psdir | --psdi | --psd | --ps) + ac_prev=psdir ;; + -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) + psdir=$ac_optarg ;; + + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + silent=yes ;; + + -runstatedir | --runstatedir | --runstatedi | --runstated \ + | --runstate | --runstat | --runsta | --runst | --runs \ + | --run | --ru | --r) + ac_prev=runstatedir ;; + -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ + | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ + | --run=* | --ru=* | --r=*) + runstatedir=$ac_optarg ;; + + -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) + ac_prev=sbindir ;; + -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ + | --sbi=* | --sb=*) + sbindir=$ac_optarg ;; + + -sharedstatedir | --sharedstatedir | --sharedstatedi \ + | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ + | --sharedst | --shareds | --shared | --share | --shar \ + | --sha | --sh) + ac_prev=sharedstatedir ;; + -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ + | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ + | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ + | --sha=* | --sh=*) + sharedstatedir=$ac_optarg ;; + + -site | --site | --sit) + ac_prev=site ;; + -site=* | --site=* | --sit=*) + site=$ac_optarg ;; + + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + 
ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) + srcdir=$ac_optarg ;; + + -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ + | --syscon | --sysco | --sysc | --sys | --sy) + ac_prev=sysconfdir ;; + -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ + | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) + sysconfdir=$ac_optarg ;; + + -target | --target | --targe | --targ | --tar | --ta | --t) + ac_prev=target_alias ;; + -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) + target_alias=$ac_optarg ;; + + -v | -verbose | --verbose | --verbos | --verbo | --verb) + verbose=yes ;; + + -version | --version | --versio | --versi | --vers | -V) + ac_init_version=: ;; + + -with-* | --with-*) + ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=\$ac_optarg ;; + + -without-* | --without-*) + ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=no ;; + + --x) + # Obsolete; use --with-x. 
+ with_x=yes ;; + + -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ + | --x-incl | --x-inc | --x-in | --x-i) + ac_prev=x_includes ;; + -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ + | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) + x_includes=$ac_optarg ;; + + -x-libraries | --x-libraries | --x-librarie | --x-librari \ + | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) + ac_prev=x_libraries ;; + -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ + | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) + x_libraries=$ac_optarg ;; + + -*) as_fn_error $? "unrecognized option: \`$ac_option' +Try \`$0 --help' for more information" + ;; + + *=*) + ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` + # Reject names that are not valid shell variable names. + case $ac_envvar in #( + '' | [0-9]* | *[!_$as_cr_alnum]* ) + as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; + esac + eval $ac_envvar=\$ac_optarg + export $ac_envvar ;; + + *) + # FIXME: should be removed in autoconf 3.0. + $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 + expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && + $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 + : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" + ;; + + esac +done + +if test -n "$ac_prev"; then + ac_option=--`echo $ac_prev | sed 's/_/-/g'` + as_fn_error $? "missing argument to $ac_option" +fi + +if test -n "$ac_unrecognized_opts"; then + case $enable_option_checking in + no) ;; + fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; + *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; + esac +fi + +# Check all directory arguments for consistency. 
+for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ + datadir sysconfdir sharedstatedir localstatedir includedir \ + oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ + libdir localedir mandir runstatedir +do + eval ac_val=\$$ac_var + # Remove trailing slashes. + case $ac_val in + */ ) + ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` + eval $ac_var=\$ac_val;; + esac + # Be sure to have absolute directory names. + case $ac_val in + [\\/$]* | ?:[\\/]* ) continue;; + NONE | '' ) case $ac_var in *prefix ) continue;; esac;; + esac + as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" +done + +# There might be people who depend on the old broken behavior: `$host' +# used to hold the argument of --host etc. +# FIXME: To remove some day. +build=$build_alias +host=$host_alias +target=$target_alias + +# FIXME: To remove some day. +if test "x$host_alias" != x; then + if test "x$build_alias" = x; then + cross_compiling=maybe + elif test "x$build_alias" != "x$host_alias"; then + cross_compiling=yes + fi +fi + +ac_tool_prefix= +test -n "$host_alias" && ac_tool_prefix=$host_alias- + +test "$silent" = yes && exec 6>/dev/null + + +ac_pwd=`pwd` && test -n "$ac_pwd" && +ac_ls_di=`ls -di .` && +ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || + as_fn_error $? "working directory cannot be determined" +test "X$ac_ls_di" = "X$ac_pwd_ls_di" || + as_fn_error $? "pwd does not report name of working directory" + + +# Find the source files, if location was not specified. +if test -z "$srcdir"; then + ac_srcdir_defaulted=yes + # Try the directory containing this script, then the parent directory. + ac_confdir=`$as_dirname -- "$as_myself" || +$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_myself" : 'X\(//\)[^/]' \| \ + X"$as_myself" : 'X\(//\)$' \| \ + X"$as_myself" : 'X\(/\)' \| . 
2>/dev/null || +$as_echo X"$as_myself" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + srcdir=$ac_confdir + if test ! -r "$srcdir/$ac_unique_file"; then + srcdir=.. + fi +else + ac_srcdir_defaulted=no +fi +if test ! -r "$srcdir/$ac_unique_file"; then + test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." + as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" +fi +ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" +ac_abs_confdir=`( + cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" + pwd)` +# When building in place, set srcdir=. +if test "$ac_abs_confdir" = "$ac_pwd"; then + srcdir=. +fi +# Remove unnecessary trailing slashes from srcdir. +# Double slashes in file names in object file debugging info +# mess up M-x gdb in Emacs. +case $srcdir in +*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; +esac +for ac_var in $ac_precious_vars; do + eval ac_env_${ac_var}_set=\${${ac_var}+set} + eval ac_env_${ac_var}_value=\$${ac_var} + eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} + eval ac_cv_env_${ac_var}_value=\$${ac_var} +done + +# +# Report the --help message. +# +if test "$ac_init_help" = "long"; then + # Omit some internal or obsolete options to make the list less imposing. + # This message is too long to be a string in the A/UX 3.1 sh. + cat <<_ACEOF +\`configure' configures libssh2 - to adapt to many kinds of systems. + +Usage: $0 [OPTION]... [VAR=VALUE]... + +To assign environment variables (e.g., CC, CFLAGS...), specify them as +VAR=VALUE. See below for descriptions of some of the useful variables. + +Defaults for the options are specified in brackets. 
+ +Configuration: + -h, --help display this help and exit + --help=short display options specific to this package + --help=recursive display the short help of all the included packages + -V, --version display version information and exit + -q, --quiet, --silent do not print \`checking ...' messages + --cache-file=FILE cache test results in FILE [disabled] + -C, --config-cache alias for \`--cache-file=config.cache' + -n, --no-create do not create output files + --srcdir=DIR find the sources in DIR [configure dir or \`..'] + +Installation directories: + --prefix=PREFIX install architecture-independent files in PREFIX + [$ac_default_prefix] + --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX + [PREFIX] + +By default, \`make install' will install all the files in +\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify +an installation prefix other than \`$ac_default_prefix' using \`--prefix', +for instance \`--prefix=\$HOME'. + +For better control, use the options below. 
+ +Fine tuning of the installation directories: + --bindir=DIR user executables [EPREFIX/bin] + --sbindir=DIR system admin executables [EPREFIX/sbin] + --libexecdir=DIR program executables [EPREFIX/libexec] + --sysconfdir=DIR read-only single-machine data [PREFIX/etc] + --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] + --localstatedir=DIR modifiable single-machine data [PREFIX/var] + --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] + --libdir=DIR object code libraries [EPREFIX/lib] + --includedir=DIR C header files [PREFIX/include] + --oldincludedir=DIR C header files for non-gcc [/usr/include] + --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] + --datadir=DIR read-only architecture-independent data [DATAROOTDIR] + --infodir=DIR info documentation [DATAROOTDIR/info] + --localedir=DIR locale-dependent data [DATAROOTDIR/locale] + --mandir=DIR man documentation [DATAROOTDIR/man] + --docdir=DIR documentation root [DATAROOTDIR/doc/libssh2] + --htmldir=DIR html documentation [DOCDIR] + --dvidir=DIR dvi documentation [DOCDIR] + --pdfdir=DIR pdf documentation [DOCDIR] + --psdir=DIR ps documentation [DOCDIR] +_ACEOF + + cat <<\_ACEOF + +Program names: + --program-prefix=PREFIX prepend PREFIX to installed program names + --program-suffix=SUFFIX append SUFFIX to installed program names + --program-transform-name=PROGRAM run sed PROGRAM on installed program names + +System types: + --build=BUILD configure for building on BUILD [guessed] + --host=HOST cross-compile to build programs to run on HOST [BUILD] +_ACEOF +fi + +if test -n "$ac_init_help"; then + case $ac_init_help in + short | recursive ) echo "Configuration of libssh2 -:";; + esac + cat <<\_ACEOF + +Optional Features: + --disable-option-checking ignore unrecognized --enable/--with options + --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) + --enable-FEATURE[=ARG] include FEATURE [ARG=yes] + --enable-maintainer-mode + enable make 
rules and dependencies not useful (and + sometimes confusing) to the casual installer + --enable-silent-rules less verbose build output (undo: "make V=1") + --disable-silent-rules verbose build output (undo: "make V=0") + --enable-dependency-tracking + do not reject slow dependency extractors + --disable-dependency-tracking + speeds up one-time build + --enable-shared[=PKGS] build shared libraries [default=yes] + --enable-static[=PKGS] build static libraries [default=yes] + --enable-fast-install[=PKGS] + optimize for fast installation [default=yes] + --disable-libtool-lock avoid locking (might break parallel builds) + --disable-largefile omit support for large files + --disable-rpath do not hardcode runtime library paths + --enable-crypt-none Permit "none" cipher -- NOT RECOMMENDED + --enable-mac-none Permit "none" MAC -- NOT RECOMMENDED + --disable-gex-new Disable "new" diffie-hellman-group-exchange-sha1 + method + --disable-clear-memory Disable clearing of memory before being freed + --enable-debug Enable pedantic and debug options + --disable-debug Disable debug options + --enable-hidden-symbols Hide internal symbols in library + --disable-hidden-symbols + Leave all symbols with default visibility in library + --enable-examples-build Build example applications (this is the default) + --disable-examples-build + Do not build example applications + +Optional Packages: + --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] + --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) + --with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use + both] + --with-gnu-ld assume the C compiler uses GNU ld [default=no] + --with-sysroot=DIR Search for dependent libraries within DIR + (or the compiler's sysroot if not specified). 
+ --with-openssl Use OpenSSL for crypto + --with-libgcrypt Use libgcrypt for crypto + --with-wincng Use Windows CNG for crypto + --with-libz Use zlib for compression + --with-gnu-ld assume the C compiler uses GNU ld default=no + --with-libssl-prefix[=DIR] search for libssl in DIR/include and DIR/lib + --without-libssl-prefix don't search for libssl in includedir and libdir + --with-libgcrypt-prefix[=DIR] search for libgcrypt in DIR/include and DIR/lib + --without-libgcrypt-prefix don't search for libgcrypt in includedir and libdir + --with-libbcrypt-prefix[=DIR] search for libbcrypt in DIR/include and DIR/lib + --without-libbcrypt-prefix don't search for libbcrypt in includedir and libdir + --with-libcrypt32-prefix[=DIR] search for libcrypt32 in DIR/include and DIR/lib + --without-libcrypt32-prefix don't search for libcrypt32 in includedir and libdir + --with-libz-prefix[=DIR] search for libz in DIR/include and DIR/lib + --without-libz-prefix don't search for libz in includedir and libdir + +Some influential environment variables: + CC C compiler command + CFLAGS C compiler flags + LDFLAGS linker flags, e.g. -L if you have libraries in a + nonstandard directory + LIBS libraries to pass to the linker, e.g. -l + CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if + you have headers in a nonstandard directory + CPP C preprocessor + +Use these variables to override the choices made by `configure' or to help +it to find libraries and programs with nonstandard names/locations. + +Report bugs to . +_ACEOF +ac_status=$? +fi + +if test "$ac_init_help" = "recursive"; then + # If there are subdirs, report their specific --help. + for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue + test -d "$ac_dir" || + { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || + continue + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. 
ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + cd "$ac_dir" || { ac_status=$?; continue; } + # Check for guested configure. + if test -f "$ac_srcdir/configure.gnu"; then + echo && + $SHELL "$ac_srcdir/configure.gnu" --help=recursive + elif test -f "$ac_srcdir/configure"; then + echo && + $SHELL "$ac_srcdir/configure" --help=recursive + else + $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 + fi || ac_status=$? + cd "$ac_pwd" || { ac_status=$?; break; } + done +fi + +test -n "$ac_init_help" && exit $ac_status +if $ac_init_version; then + cat <<\_ACEOF +libssh2 configure - +generated by GNU Autoconf 2.69 + +Copyright (C) 2012 Free Software Foundation, Inc. +This configure script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it. +_ACEOF + exit +fi + +## ------------------------ ## +## Autoconf initialization. 
## +## ------------------------ ## + +# ac_fn_c_try_compile LINENO +# -------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! -s conftest.err + } && test -s conftest.$ac_objext; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_compile + +# ac_fn_c_check_type LINENO TYPE VAR INCLUDES +# ------------------------------------------- +# Tests whether TYPE exists after having included INCLUDES, setting cache +# variable VAR accordingly. +ac_fn_c_check_type () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + eval "$3=no" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +int +main () +{ +if (sizeof ($2)) + return 0; + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$4 +int +main () +{ +if (sizeof (($2))) + return 0; + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +else + eval "$3=yes" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_type + +# ac_fn_c_try_cpp LINENO +# ---------------------- +# Try to preprocess conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_cpp () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_cpp conftest.$ac_ext" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } > conftest.i && { + test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || + test ! -s conftest.err + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_cpp + +# ac_fn_c_try_run LINENO +# ---------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes +# that executables *can* be run. 
+ac_fn_c_try_run () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then : + ac_retval=0 +else + $as_echo "$as_me: program exited with status $ac_status" >&5 + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=$ac_status +fi + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_run + +# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES +# ------------------------------------------------------- +# Tests whether HEADER exists and can be compiled using the include files in +# INCLUDES, setting the cache variable VAR accordingly. +ac_fn_c_check_header_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$4 +#include <$2> +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_header_compile + +# ac_fn_c_try_link LINENO +# ----------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_link () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest$ac_exeext + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! -s conftest.err + } && test -s conftest$ac_exeext && { + test "$cross_compiling" = yes || + test -x conftest$ac_exeext + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information + # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would + # interfere with the next link command; also delete a directory that is + # left behind by Apple's compiler. We do this before executing the actions. 
+ rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_link + +# ac_fn_c_check_func LINENO FUNC VAR +# ---------------------------------- +# Tests whether FUNC exists, setting the cache variable VAR accordingly +ac_fn_c_check_func () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +/* Define $2 to an innocuous variant, in case declares $2. + For example, HP-UX 11i declares gettimeofday. */ +#define $2 innocuous_$2 + +/* System header to define __stub macros and hopefully few prototypes, + which can conflict with char $2 (); below. + Prefer to if __STDC__ is defined, since + exists even on freestanding compilers. */ + +#ifdef __STDC__ +# include +#else +# include +#endif + +#undef $2 + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $2 (); +/* The GNU C library defines this for functions which it implements + to always fail with ENOSYS. Some functions are actually named + something starting with __ and the normal name is an alias. 
*/ +#if defined __stub_$2 || defined __stub___$2 +choke me +#endif + +int +main () +{ +return $2 (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_func + +# ac_fn_c_check_decl LINENO SYMBOL VAR INCLUDES +# --------------------------------------------- +# Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR +# accordingly. +ac_fn_c_check_decl () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + as_decl_name=`echo $2|sed 's/ *(.*//'` + as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'` + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5 +$as_echo_n "checking whether $as_decl_name is declared... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +int +main () +{ +#ifndef $as_decl_name +#ifdef __cplusplus + (void) $as_decl_use; +#else + (void) $as_decl_name; +#endif +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_decl + +# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES +# ------------------------------------------------------- +# Tests whether HEADER exists, giving a warning if it cannot be compiled using +# the include files in INCLUDES and setting the cache variable VAR +# accordingly. 
+ac_fn_c_check_header_mongrel () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if eval \${$3+:} false; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +else + # Is the header compilable? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 +$as_echo_n "checking $2 usability... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_header_compiler=yes +else + ac_header_compiler=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 +$as_echo "$ac_header_compiler" >&6; } + +# Is the header present? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 +$as_echo_n "checking $2 presence... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include <$2> +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + ac_header_preproc=yes +else + ac_header_preproc=no +fi +rm -f conftest.err conftest.i conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 +$as_echo "$ac_header_preproc" >&6; } + +# So? What about this header? +case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( + yes:no: ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 +$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" 
>&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} + ;; + no:yes:* ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 +$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 +$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 +$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 +$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} +( $as_echo "## ----------------------------------------- ## +## Report this to libssh2-devel@cool.haxx.se ## +## ----------------------------------------- ##" + ) | sed "s/^/$as_me: WARNING: /" >&2 + ;; +esac + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + eval "$3=\$ac_header_compiler" +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_header_mongrel +cat >config.log <<_ACEOF +This file contains any messages produced by compilers while +running configure, to aid debugging if configure makes a mistake. + +It was created by libssh2 $as_me -, which was +generated by GNU Autoconf 2.69. 
Invocation command line was + + $ $0 $@ + +_ACEOF +exec 5>>config.log +{ +cat <<_ASUNAME +## --------- ## +## Platform. ## +## --------- ## + +hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` + +/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` +/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` +/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` +/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` + +_ASUNAME + +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + $as_echo "PATH: $as_dir" + done +IFS=$as_save_IFS + +} >&5 + +cat >&5 <<_ACEOF + + +## ----------- ## +## Core tests. ## +## ----------- ## + +_ACEOF + + +# Keep a trace of the command line. +# Strip out --no-create and --no-recursion so they do not pile up. +# Strip out --silent because we don't want to record it for future runs. +# Also quote any args containing shell meta-characters. +# Make two passes to allow for proper duplicate-argument suppression. 
+ac_configure_args= +ac_configure_args0= +ac_configure_args1= +ac_must_keep_next=false +for ac_pass in 1 2 +do + for ac_arg + do + case $ac_arg in + -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + continue ;; + *\'*) + ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + case $ac_pass in + 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; + 2) + as_fn_append ac_configure_args1 " '$ac_arg'" + if test $ac_must_keep_next = true; then + ac_must_keep_next=false # Got value, back to normal. + else + case $ac_arg in + *=* | --config-cache | -C | -disable-* | --disable-* \ + | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ + | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ + | -with-* | --with-* | -without-* | --without-* | --x) + case "$ac_configure_args0 " in + "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; + esac + ;; + -* ) ac_must_keep_next=true ;; + esac + fi + as_fn_append ac_configure_args " '$ac_arg'" + ;; + esac + done +done +{ ac_configure_args0=; unset ac_configure_args0;} +{ ac_configure_args1=; unset ac_configure_args1;} + +# When interrupted or exit'd, cleanup temporary files, and complete +# config.log. We remove comments because anyway the quotes in there +# would cause problems or look ugly. +# WARNING: Use '\'' to represent an apostrophe within the trap. +# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. +trap 'exit_status=$? + # Save into config.log some information that might help in debugging. + { + echo + + $as_echo "## ---------------- ## +## Cache variables. 
## +## ---------------- ##" + echo + # The following way of writing the cache mishandles newlines in values, +( + for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + (set) 2>&1 | + case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + sed -n \ + "s/'\''/'\''\\\\'\'''\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" + ;; #( + *) + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) + echo + + $as_echo "## ----------------- ## +## Output variables. ## +## ----------------- ##" + echo + for ac_var in $ac_subst_vars + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + + if test -n "$ac_subst_files"; then + $as_echo "## ------------------- ## +## File substitutions. ## +## ------------------- ##" + echo + for ac_var in $ac_subst_files + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + fi + + if test -s confdefs.h; then + $as_echo "## ----------- ## +## confdefs.h. 
## +## ----------- ##" + echo + cat confdefs.h + echo + fi + test "$ac_signal" != 0 && + $as_echo "$as_me: caught signal $ac_signal" + $as_echo "$as_me: exit $exit_status" + } >&5 + rm -f core *.core core.conftest.* && + rm -f -r conftest* confdefs* conf$$* $ac_clean_files && + exit $exit_status +' 0 +for ac_signal in 1 2 13 15; do + trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal +done +ac_signal=0 + +# confdefs.h avoids OS command line length limits that DEFS can exceed. +rm -f -r conftest* confdefs.h + +$as_echo "/* confdefs.h */" > confdefs.h + +# Predefined preprocessor variables. + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_NAME "$PACKAGE_NAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_TARNAME "$PACKAGE_TARNAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_VERSION "$PACKAGE_VERSION" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_STRING "$PACKAGE_STRING" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_URL "$PACKAGE_URL" +_ACEOF + + +# Let the site file select an alternate cache file if it wants to. +# Prefer an explicitly selected file to automatically selected ones. +ac_site_file1=NONE +ac_site_file2=NONE +if test -n "$CONFIG_SITE"; then + # We do not want a PATH search for config.site. 
+ case $CONFIG_SITE in #(( + -*) ac_site_file1=./$CONFIG_SITE;; + */*) ac_site_file1=$CONFIG_SITE;; + *) ac_site_file1=./$CONFIG_SITE;; + esac +elif test "x$prefix" != xNONE; then + ac_site_file1=$prefix/share/config.site + ac_site_file2=$prefix/etc/config.site +else + ac_site_file1=$ac_default_prefix/share/config.site + ac_site_file2=$ac_default_prefix/etc/config.site +fi +for ac_site_file in "$ac_site_file1" "$ac_site_file2" +do + test "x$ac_site_file" = xNONE && continue + if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 +$as_echo "$as_me: loading site script $ac_site_file" >&6;} + sed 's/^/| /' "$ac_site_file" >&5 + . "$ac_site_file" \ + || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "failed to load site script $ac_site_file +See \`config.log' for more details" "$LINENO" 5; } + fi +done + +if test -r "$cache_file"; then + # Some versions of bash will fail to source /dev/null (special files + # actually), so we avoid doing that. DJGPP emulates it as a regular file. + if test /dev/null != "$cache_file" && test -f "$cache_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 +$as_echo "$as_me: loading cache $cache_file" >&6;} + case $cache_file in + [\\/]* | ?:[\\/]* ) . "$cache_file";; + *) . "./$cache_file";; + esac + fi +else + { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 +$as_echo "$as_me: creating cache $cache_file" >&6;} + >$cache_file +fi + +# Check that the precious variables saved in the cache have kept the same +# value. 
+ac_cache_corrupted=false +for ac_var in $ac_precious_vars; do + eval ac_old_set=\$ac_cv_env_${ac_var}_set + eval ac_new_set=\$ac_env_${ac_var}_set + eval ac_old_val=\$ac_cv_env_${ac_var}_value + eval ac_new_val=\$ac_env_${ac_var}_value + case $ac_old_set,$ac_new_set in + set,) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,set) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,);; + *) + if test "x$ac_old_val" != "x$ac_new_val"; then + # differences in whitespace do not lead to failure. + ac_old_val_w=`echo x $ac_old_val` + ac_new_val_w=`echo x $ac_new_val` + if test "$ac_old_val_w" != "$ac_new_val_w"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 +$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} + ac_cache_corrupted=: + else + { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 +$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} + eval $ac_var=\$ac_old_val + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 +$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 +$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} + fi;; + esac + # Pass precious variables to config.status. + if test "$ac_new_set" = set; then + case $ac_new_val in + *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; + *) ac_arg=$ac_var=$ac_new_val ;; + esac + case " $ac_configure_args " in + *" '$ac_arg' "*) ;; # Avoid dups. 
Use of quotes ensures accuracy. + *) as_fn_append ac_configure_args " '$ac_arg'" ;; + esac + fi +done +if $ac_cache_corrupted; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 +$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} + as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 +fi +## -------------------- ## +## Main body of script. ## +## -------------------- ## + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + + +ac_config_headers="$ac_config_headers src/libssh2_config.h example/libssh2_config.h" + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to enable maintainer-specific portions of Makefiles" >&5 +$as_echo_n "checking whether to enable maintainer-specific portions of Makefiles... " >&6; } + # Check whether --enable-maintainer-mode was given. +if test "${enable_maintainer_mode+set}" = set; then : + enableval=$enable_maintainer_mode; USE_MAINTAINER_MODE=$enableval +else + USE_MAINTAINER_MODE=no +fi + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_MAINTAINER_MODE" >&5 +$as_echo "$USE_MAINTAINER_MODE" >&6; } + if test $USE_MAINTAINER_MODE = yes; then + MAINTAINER_MODE_TRUE= + MAINTAINER_MODE_FALSE='#' +else + MAINTAINER_MODE_TRUE='#' + MAINTAINER_MODE_FALSE= +fi + + MAINT=$MAINTAINER_MODE_TRUE + + +# Check whether --enable-silent-rules was given. 
+if test "${enable_silent_rules+set}" = set; then : + enableval=$enable_silent_rules; +fi + +case $enable_silent_rules in # ((( + yes) AM_DEFAULT_VERBOSITY=0;; + no) AM_DEFAULT_VERBOSITY=1;; + *) AM_DEFAULT_VERBOSITY=0;; +esac +am_make=${MAKE-make} +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5 +$as_echo_n "checking whether $am_make supports nested variables... " >&6; } +if ${am_cv_make_support_nested_variables+:} false; then : + $as_echo_n "(cached) " >&6 +else + if $as_echo 'TRUE=$(BAR$(V)) +BAR0=false +BAR1=true +V=1 +am__doit: + @$(TRUE) +.PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then + am_cv_make_support_nested_variables=yes +else + am_cv_make_support_nested_variables=no +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5 +$as_echo "$am_cv_make_support_nested_variables" >&6; } +if test $am_cv_make_support_nested_variables = yes; then + AM_V='$(V)' + AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' +else + AM_V=$AM_DEFAULT_VERBOSITY + AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY +fi +AM_BACKSLASH='\' + + +# Extract the first word of "sed", so it can be a program name with args. +set dummy sed; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_SED+:} false; then : + $as_echo_n "(cached) " >&6 +else + case $SED in + [\\/]* | ?:[\\/]*) + ac_cv_path_SED="$SED" # Let the user override the test with a path. + ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_dummy="$PATH:/usr/bin:/usr/local/bin" +for as_dir in $as_dummy +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_path_SED="$as_dir/$ac_word$ac_exec_ext" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + test -z "$ac_cv_path_SED" && ac_cv_path_SED="sed-was-not-found-by-configure" + ;; +esac +fi +SED=$ac_cv_path_SED +if test -n "$SED"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $SED" >&5 +$as_echo "$SED" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + + +if test "x$SED" = "xsed-was-not-found-by-configure"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: sed was not found, this may ruin your chances to build fine" >&5 +$as_echo "$as_me: WARNING: sed was not found, this may ruin your chances to build fine" >&2;} +fi + +LIBSSH2VER=`$SED -ne 's/^#define LIBSSH2_VERSION *"\(.*\)"/\1/p' ${srcdir}/include/libssh2.h` +am__api_version='1.15' + +ac_aux_dir= +for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do + if test -f "$ac_dir/install-sh"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/install-sh -c" + break + elif test -f "$ac_dir/install.sh"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/install.sh -c" + break + elif test -f "$ac_dir/shtool"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/shtool install -c" + break + fi +done +if test -z "$ac_aux_dir"; then + as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 +fi + +# These three variables are undocumented and unsupported, +# and are intended to be withdrawn in a future Autoconf release. +# They can cause serious problems if a builder's source tree is in a directory +# whose full name contains unusual characters. +ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. 
+ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. +ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. + + +# Find a good install program. We prefer a C program (faster), +# so one script is as good as another. But avoid the broken or +# incompatible versions: +# SysV /etc/install, /usr/sbin/install +# SunOS /usr/etc/install +# IRIX /sbin/install +# AIX /bin/install +# AmigaOS /C/install, which installs bootblocks on floppy discs +# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag +# AFS /usr/afsws/bin/install, which mishandles nonexistent args +# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" +# OS/2's system install, which has a completely different semantic +# ./install, which can be erroneously created by make from ./install.sh. +# Reject install programs that cannot install multiple files. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 +$as_echo_n "checking for a BSD-compatible install... " >&6; } +if test -z "$INSTALL"; then +if ${ac_cv_path_install+:} false; then : + $as_echo_n "(cached) " >&6 +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + # Account for people who put trailing slashes in PATH elements. +case $as_dir/ in #(( + ./ | .// | /[cC]/* | \ + /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ + ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ + /usr/ucb/* ) ;; + *) + # OSF1 and SCO ODT 3.0 have their own names for install. + # Don't use installbsd from OSF since it installs stuff as root + # by default. + for ac_prog in ginstall scoinst install; do + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then + if test $ac_prog = install && + grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then + # AIX install. It has an incompatible calling convention. 
+ : + elif test $ac_prog = install && + grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then + # program-specific install script used by HP pwplus--don't use. + : + else + rm -rf conftest.one conftest.two conftest.dir + echo one > conftest.one + echo two > conftest.two + mkdir conftest.dir + if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && + test -s conftest.one && test -s conftest.two && + test -s conftest.dir/conftest.one && + test -s conftest.dir/conftest.two + then + ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" + break 3 + fi + fi + fi + done + done + ;; +esac + + done +IFS=$as_save_IFS + +rm -rf conftest.one conftest.two conftest.dir + +fi + if test "${ac_cv_path_install+set}" = set; then + INSTALL=$ac_cv_path_install + else + # As a last resort, use the slow shell script. Don't cache a + # value for INSTALL within a source directory, because that will + # break other packages using the cache if that directory is + # removed, or if the value is a relative name. + INSTALL=$ac_install_sh + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 +$as_echo "$INSTALL" >&6; } + +# Use test -z because SunOS4 sh mishandles braces in ${var-val}. +# It thinks the first close brace ends the variable substitution. +test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' + +test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' + +test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 +$as_echo_n "checking whether build environment is sane... " >&6; } +# Reject unsafe characters in $srcdir or the absolute working directory +# name. Accept space and tab only in the latter. +am_lf=' +' +case `pwd` in + *[\\\"\#\$\&\'\`$am_lf]*) + as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;; +esac +case $srcdir in + *[\\\"\#\$\&\'\`$am_lf\ \ ]*) + as_fn_error $? 
"unsafe srcdir value: '$srcdir'" "$LINENO" 5;; +esac + +# Do 'set' in a subshell so we don't clobber the current shell's +# arguments. Must try -L first in case configure is actually a +# symlink; some systems play weird games with the mod time of symlinks +# (eg FreeBSD returns the mod time of the symlink's containing +# directory). +if ( + am_has_slept=no + for am_try in 1 2; do + echo "timestamp, slept: $am_has_slept" > conftest.file + set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` + if test "$*" = "X"; then + # -L didn't work. + set X `ls -t "$srcdir/configure" conftest.file` + fi + if test "$*" != "X $srcdir/configure conftest.file" \ + && test "$*" != "X conftest.file $srcdir/configure"; then + + # If neither matched, then we have a broken ls. This can happen + # if, for instance, CONFIG_SHELL is bash and it inherits a + # broken ls alias from the environment. This has actually + # happened. Such a system could not be considered "sane". + as_fn_error $? "ls -t appears to fail. Make sure there is not a broken + alias in your environment" "$LINENO" 5 + fi + if test "$2" = conftest.file || test $am_try -eq 2; then + break + fi + # Just in case. + sleep 1 + am_has_slept=yes + done + test "$2" = conftest.file + ) +then + # Ok. + : +else + as_fn_error $? "newly created file is older than distributed files! +Check your system clock" "$LINENO" 5 +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +# If we didn't sleep, we still need to ensure time stamps of config.status and +# generated files are strictly newer. +am_sleep_pid= +if grep 'slept: no' conftest.file >/dev/null 2>&1; then + ( sleep 1 ) & + am_sleep_pid=$! +fi + +rm -f conftest.file + +test "$program_prefix" != NONE && + program_transform_name="s&^&$program_prefix&;$program_transform_name" +# Use a double $ so make ignores it. +test "$program_suffix" != NONE && + program_transform_name="s&\$&$program_suffix&;$program_transform_name" +# Double any \ or $. 
+# By default was `s,x,x', remove it if useless. +ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' +program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` + +# Expand $ac_aux_dir to an absolute path. +am_aux_dir=`cd "$ac_aux_dir" && pwd` + +if test x"${MISSING+set}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; + *) + MISSING="\${SHELL} $am_aux_dir/missing" ;; + esac +fi +# Use eval to expand $SHELL +if eval "$MISSING --is-lightweight"; then + am_missing_run="$MISSING " +else + am_missing_run= + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: 'missing' script is too old or missing" >&5 +$as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;} +fi + +if test x"${install_sh+set}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; + *) + install_sh="\${SHELL} $am_aux_dir/install-sh" + esac +fi + +# Installed binaries are usually stripped using 'strip' when the user +# run "make install-strip". However 'strip' might not be the right +# tool to use in cross-compilation environments, therefore Automake +# will honor the 'STRIP' environment variable to overrule this program. +if test "$cross_compiling" != no; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. +set dummy ${ac_tool_prefix}strip; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_STRIP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$STRIP"; then + ac_cv_prog_STRIP="$STRIP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_STRIP="${ac_tool_prefix}strip" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +STRIP=$ac_cv_prog_STRIP +if test -n "$STRIP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 +$as_echo "$STRIP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_STRIP"; then + ac_ct_STRIP=$STRIP + # Extract the first word of "strip", so it can be a program name with args. +set dummy strip; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_STRIP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_STRIP"; then + ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_STRIP="strip" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP +if test -n "$ac_ct_STRIP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 +$as_echo "$ac_ct_STRIP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_STRIP" = x; then + STRIP=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + STRIP=$ac_ct_STRIP + fi +else + STRIP="$ac_cv_prog_STRIP" +fi + +fi +INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 +$as_echo_n "checking for a thread-safe mkdir -p... " >&6; } +if test -z "$MKDIR_P"; then + if ${ac_cv_path_mkdir+:} false; then : + $as_echo_n "(cached) " >&6 +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in mkdir gmkdir; do + for ac_exec_ext in '' $ac_executable_extensions; do + as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue + case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( + 'mkdir (GNU coreutils) '* | \ + 'mkdir (coreutils) '* | \ + 'mkdir (fileutils) '4.1*) + ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext + break 3;; + esac + done + done + done +IFS=$as_save_IFS + +fi + + test -d ./--version && rmdir ./--version + if test "${ac_cv_path_mkdir+set}" = set; then + MKDIR_P="$ac_cv_path_mkdir -p" + else + # As a last resort, use the slow shell script. 
Don't cache a + # value for MKDIR_P within a source directory, because that will + # break other packages using the cache if that directory is + # removed, or if the value is a relative name. + MKDIR_P="$ac_install_sh -d" + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 +$as_echo "$MKDIR_P" >&6; } + +for ac_prog in gawk mawk nawk awk +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_AWK+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$AWK"; then + ac_cv_prog_AWK="$AWK" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_AWK="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +AWK=$ac_cv_prog_AWK +if test -n "$AWK"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 +$as_echo "$AWK" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$AWK" && break +done + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 +$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } +set x ${MAKE-make} +ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` +if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat >conftest.make <<\_ACEOF +SHELL = /bin/sh +all: + @echo '@@@%%%=$(MAKE)=@@@%%%' +_ACEOF +# GNU make sometimes prints "make[1]: Entering ...", which would confuse us. 
+case `${MAKE-make} -f conftest.make 2>/dev/null` in + *@@@%%%=?*=@@@%%%*) + eval ac_cv_prog_make_${ac_make}_set=yes;; + *) + eval ac_cv_prog_make_${ac_make}_set=no;; +esac +rm -f conftest.make +fi +if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + SET_MAKE= +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + SET_MAKE="MAKE=${MAKE-make}" +fi + +rm -rf .tst 2>/dev/null +mkdir .tst 2>/dev/null +if test -d .tst; then + am__leading_dot=. +else + am__leading_dot=_ +fi +rmdir .tst 2>/dev/null + +if test "`cd $srcdir && pwd`" != "`pwd`"; then + # Use -I$(srcdir) only when $(srcdir) != ., so that make's output + # is not polluted with repeated "-I." + am__isrc=' -I$(srcdir)' + # test to see if srcdir already configured + if test -f $srcdir/config.status; then + as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5 + fi +fi + +# test whether we have cygpath +if test -z "$CYGPATH_W"; then + if (cygpath --version) >/dev/null 2>/dev/null; then + CYGPATH_W='cygpath -w' + else + CYGPATH_W=echo + fi +fi + + +# Define the identity of the package. + PACKAGE='libssh2' + VERSION='-' + + +cat >>confdefs.h <<_ACEOF +#define PACKAGE "$PACKAGE" +_ACEOF + + +cat >>confdefs.h <<_ACEOF +#define VERSION "$VERSION" +_ACEOF + +# Some tools Automake needs. + +ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} + + +AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} + + +AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} + + +AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} + + +MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} + +# For better backward compatibility. To be removed once Automake 1.9.x +# dies out for good. For more background, see: +# +# +mkdir_p='$(MKDIR_P)' + +# We need awk for the "check" target (and possibly the TAP driver). The +# system "awk" is bad on some platforms. 
+# Always define AMTAR for backward compatibility. Yes, it's still used +# in the wild :-( We should find a proper way to deprecate it ... +AMTAR='$${TAR-tar}' + + +# We'll loop over all known methods to create a tar archive until one works. +_am_tools='gnutar pax cpio none' + +am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -' + + + + + + +# POSIX will say in a future version that running "rm -f" with no argument +# is OK; and we want to be able to make that assumption in our Makefile +# recipes. So use an aggressive probe to check that the usage we want is +# actually supported "in the wild" to an acceptable degree. +# See automake bug#10828. +# To make any issue more visible, cause the running configure to be aborted +# by default if the 'rm' program in use doesn't match our expectations; the +# user can still override this though. +if rm -f && rm -fr && rm -rf; then : OK; else + cat >&2 <<'END' +Oops! + +Your 'rm' program seems unable to run without file operands specified +on the command line, even when the '-f' option is present. This is contrary +to the behaviour of most rm programs out there, and not conforming with +the upcoming POSIX standard: + +Please tell bug-automake@gnu.org about your system, including the value +of your $PATH and any error possibly output before this message. This +can help us improve future automake versions. + +END + if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then + echo 'Configuration will proceed anyway, since you have set the' >&2 + echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 + echo >&2 + else + cat >&2 <<'END' +Aborting the configuration process, to ensure you take notice of the issue. + +You can download and install GNU coreutils to get an 'rm' implementation +that behaves properly: . + +If you want to complete the configuration process using your problematic +'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM +to "yes", and re-run configure. + +END + as_fn_error $? 
"Your 'rm' program is bad, sorry." "$LINENO" 5 + fi +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking libssh2 version" >&5 +$as_echo_n "checking libssh2 version... " >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBSSH2VER" >&5 +$as_echo "$LIBSSH2VER" >&6; } + + + +AB_VERSION=$LIBSSH2VER + +# Make sure we can run config.sub. +$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || + as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 +$as_echo_n "checking build system type... " >&6; } +if ${ac_cv_build+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_build_alias=$build_alias +test "x$ac_build_alias" = x && + ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` +test "x$ac_build_alias" = x && + as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5 +ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || + as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 +$as_echo "$ac_cv_build" >&6; } +case $ac_cv_build in +*-*-*) ;; +*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;; +esac +build=$ac_cv_build +ac_save_IFS=$IFS; IFS='-' +set x $ac_cv_build +shift +build_cpu=$1 +build_vendor=$2 +shift; shift +# Remember, the first character of IFS is used to create $*, +# except with old shells: +build_os=$* +IFS=$ac_save_IFS +case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 +$as_echo_n "checking host system type... " >&6; } +if ${ac_cv_host+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test "x$host_alias" = x; then + ac_cv_host=$ac_cv_build +else + ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || + as_fn_error $? 
"$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 +$as_echo "$ac_cv_host" >&6; } +case $ac_cv_host in +*-*-*) ;; +*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;; +esac +host=$ac_cv_host +ac_save_IFS=$IFS; IFS='-' +set x $ac_cv_host +shift +host_cpu=$1 +host_vendor=$2 +shift; shift +# Remember, the first character of IFS is used to create $*, +# except with old shells: +host_os=$* +IFS=$ac_save_IFS +case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac + + + + + + + if test -z "$AB_PACKAGE"; then + AB_PACKAGE=${PACKAGE_NAME:-$PACKAGE} + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: autobuild project... $AB_PACKAGE" >&5 +$as_echo "$as_me: autobuild project... $AB_PACKAGE" >&6;} + + if test -z "$AB_VERSION"; then + AB_VERSION=${PACKAGE_VERSION:-$VERSION} + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: autobuild revision... $AB_VERSION" >&5 +$as_echo "$as_me: autobuild revision... $AB_VERSION" >&6;} + + hostname=`hostname` + if test "$hostname"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: autobuild hostname... $hostname" >&5 +$as_echo "$as_me: autobuild hostname... $hostname" >&6;} + fi + + + + date=`date +%Y%m%d-%H%M%S` + if test "$?" != 0; then + date=`date` + fi + if test "$date"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: autobuild timestamp... $date" >&5 +$as_echo "$as_me: autobuild timestamp... $date" >&6;} + fi + + +# Check for the OS. +# Daniel's note: this should not be necessary and we need to work to +# get this removed. 
+ +case "$host" in + *-mingw*) + CFLAGS="$CFLAGS -DLIBSSH2_WIN32" + LIBS="$LIBS -lws2_32" + ;; + *-cygwin) + CFLAGS="$CFLAGS -DLIBSSH2_WIN32" + ;; + *darwin*) + CFLAGS="$CFLAGS -DLIBSSH2_DARWIN" + ;; + *hpux*) + ;; + *osf*) + CFLAGS="$CFLAGS -D_POSIX_PII_SOCKET" + ;; + *) + ;; +esac + +DEPDIR="${am__leading_dot}deps" + +ac_config_commands="$ac_config_commands depfiles" + + +am_make=${MAKE-make} +cat > confinc << 'END' +am__doit: + @echo this is the am__doit target +.PHONY: am__doit +END +# If we don't find an include directive, just comment out the code. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5 +$as_echo_n "checking for style of include used by $am_make... " >&6; } +am__include="#" +am__quote= +_am_result=none +# First try GNU make style include. +echo "include confinc" > confmf +# Ignore all kinds of additional output from 'make'. +case `$am_make -s -f confmf 2> /dev/null` in #( +*the\ am__doit\ target*) + am__include=include + am__quote= + _am_result=GNU + ;; +esac +# Now try BSD make style include. +if test "$am__include" = "#"; then + echo '.include "confinc"' > confmf + case `$am_make -s -f confmf 2> /dev/null` in #( + *the\ am__doit\ target*) + am__include=.include + am__quote="\"" + _am_result=BSD + ;; + esac +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5 +$as_echo "$_am_result" >&6; } +rm -f confinc confmf + +# Check whether --enable-dependency-tracking was given. 
+if test "${enable_dependency_tracking+set}" = set; then : + enableval=$enable_dependency_tracking; +fi + +if test "x$enable_dependency_tracking" != xno; then + am_depcomp="$ac_aux_dir/depcomp" + AMDEPBACKSLASH='\' + am__nodep='_no' +fi + if test "x$enable_dependency_tracking" != xno; then + AMDEP_TRUE= + AMDEP_FALSE='#' +else + AMDEP_TRUE='#' + AMDEP_FALSE= +fi + + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. +set dummy ${ac_tool_prefix}gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_CC"; then + ac_ct_CC=$CC + # Extract the first word of "gcc", so it can be a program name with args. +set dummy gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +else + CC="$ac_cv_prog_CC" +fi + +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. +set dummy ${ac_tool_prefix}cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + fi +fi +if test -z "$CC"; then + # Extract the first word of "cc", so it can be a program name with args. +set dummy cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else + ac_prog_rejected=no +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then + ac_prog_rejected=yes + continue + fi + ac_cv_prog_CC="cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +if test $ac_prog_rejected = yes; then + # We found a bogon in the path, so make sure we never use it. + set dummy $ac_cv_prog_CC + shift + if test $# != 0; then + # We chose a different compiler from the bogus one. + # However, it has the same basename, so the bogon will be chosen + # first if we set CC to just the basename; use the full file name. 
+ shift + ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" + fi +fi +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + for ac_prog in cl.exe + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$CC" && break + done +fi +if test -z "$CC"; then + ac_ct_CC=$CC + for ac_prog in cl.exe +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_CC" && break +done + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +fi + +fi + + +test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "no acceptable C compiler found in \$PATH +See \`config.log' for more details" "$LINENO" 5; } + +# Provide some information about the compiler. +$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... 
+ 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" +# Try to create an executable without -o first, disregard a.out. +# It will help us diagnose broken compilers, and finding out an intuition +# of exeext. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 +$as_echo_n "checking whether the C compiler works... " >&6; } +ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` + +# The possible output files: +ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" + +ac_rmfiles= +for ac_file in $ac_files +do + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + * ) ac_rmfiles="$ac_rmfiles $ac_file";; + esac +done +rm -f $ac_rmfiles + +if { { ac_try="$ac_link_default" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link_default") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. +# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' +# in a Makefile. We should not override ac_cv_exeext if it was cached, +# so that the user can short-circuit this test for compilers unknown to +# Autoconf. 
+for ac_file in $ac_files '' +do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) + ;; + [ab].out ) + # We found the default executable, but exeext='' is most + # certainly right. + break;; + *.* ) + if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; + then :; else + ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + fi + # We set ac_cv_exeext here because the later test for it is not + # safe: cross compilers may not add the suffix if given an `-o' + # argument, so we may need to know it at that point already. + # Even if this section looks crufty: it has the advantage of + # actually working. + break;; + * ) + break;; + esac +done +test "$ac_cv_exeext" = no && ac_cv_exeext= + +else + ac_file='' +fi +if test -z "$ac_file"; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +$as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error 77 "C compiler cannot create executables +See \`config.log' for more details" "$LINENO" 5; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 +$as_echo_n "checking for C compiler default output file name... " >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 +$as_echo "$ac_file" >&6; } +ac_exeext=$ac_cv_exeext + +rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out +ac_clean_files=$ac_clean_files_save +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 +$as_echo_n "checking for suffix of executables... 
" >&6; } +if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # If both `conftest.exe' and `conftest' are `present' (well, observable) +# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will +# work properly (i.e., refer to `conftest.exe'), while it won't with +# `rm'. +for ac_file in conftest.exe conftest conftest.*; do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + break;; + * ) break;; + esac +done +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of executables: cannot compile and link +See \`config.log' for more details" "$LINENO" 5; } +fi +rm -f conftest conftest$ac_cv_exeext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 +$as_echo "$ac_cv_exeext" >&6; } + +rm -f conftest.$ac_ext +EXEEXT=$ac_cv_exeext +ac_exeext=$EXEEXT +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +int +main () +{ +FILE *f = fopen ("conftest.out", "w"); + return ferror (f) || fclose (f) != 0; + + ; + return 0; +} +_ACEOF +ac_clean_files="$ac_clean_files conftest.out" +# Check that the compiler produces executables we can run. If not, either +# the compiler is broken, or we cross compile. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 +$as_echo_n "checking whether we are cross compiling... 
" >&6; } +if test "$cross_compiling" != yes; then + { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } + if { ac_try='./conftest$ac_cv_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then + cross_compiling=no + else + if test "$cross_compiling" = maybe; then + cross_compiling=yes + else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot run C compiled programs. +If you meant to cross compile, use \`--host'. +See \`config.log' for more details" "$LINENO" 5; } + fi + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 +$as_echo "$cross_compiling" >&6; } + +rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out +ac_clean_files=$ac_clean_files_save +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 +$as_echo_n "checking for suffix of object files... " >&6; } +if ${ac_cv_objext+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +rm -f conftest.o conftest.obj +if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>&5 + ac_status=$? 
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + for ac_file in conftest.o conftest.obj conftest.*; do + test -f "$ac_file" || continue; + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; + *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` + break;; + esac +done +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of object files: cannot compile +See \`config.log' for more details" "$LINENO" 5; } +fi +rm -f conftest.$ac_cv_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 +$as_echo "$ac_cv_objext" >&6; } +OBJEXT=$ac_cv_objext +ac_objext=$OBJEXT +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 +$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } +if ${ac_cv_c_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_c_compiler_gnu=$ac_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 +$as_echo "$ac_cv_c_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GCC=yes +else + GCC= +fi +ac_test_CFLAGS=${CFLAGS+set} +ac_save_CFLAGS=$CFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 +$as_echo_n "checking whether $CC accepts -g... 
" >&6; } +if ${ac_cv_prog_cc_g+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_save_c_werror_flag=$ac_c_werror_flag + ac_c_werror_flag=yes + ac_cv_prog_cc_g=no + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +else + CFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +else + ac_c_werror_flag=$ac_save_c_werror_flag + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_c_werror_flag=$ac_save_c_werror_flag +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 +$as_echo "$ac_cv_prog_cc_g" >&6; } +if test "$ac_test_CFLAGS" = set; then + CFLAGS=$ac_save_CFLAGS +elif test $ac_cv_prog_cc_g = yes; then + if test "$GCC" = yes; then + CFLAGS="-g -O2" + else + CFLAGS="-g" + fi +else + if test "$GCC" = yes; then + CFLAGS="-O2" + else + CFLAGS= + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 +$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } +if ${ac_cv_prog_cc_c89+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_prog_cc_c89=no +ac_save_CC=$CC +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +struct stat; +/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. 
*/ +struct buf { int x; }; +FILE * (*rcsopen) (struct buf *, struct stat *, int); +static char *e (p, i) + char **p; + int i; +{ + return p[i]; +} +static char *f (char * (*g) (char **, int), char **p, ...) +{ + char *s; + va_list v; + va_start (v,p); + s = g (p, va_arg (v,int)); + va_end (v); + return s; +} + +/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has + function prototypes and stuff, but not '\xHH' hex character constants. + These don't provoke an error unfortunately, instead are silently treated + as 'x'. The following induces an error, until -std is added to get + proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an + array size at least. It's necessary to write '\x00'==0 to get something + that's true only with -std. */ +int osf4_cc_array ['\x00' == 0 ? 1 : -1]; + +/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters + inside strings and character constants. */ +#define FOO(x) 'x' +int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; + +int test (int i, double x); +struct s1 {int (*f) (int a);}; +struct s2 {int (*f) (double a);}; +int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); +int argc; +char **argv; +int +main () +{ +return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; + ; + return 0; +} +_ACEOF +for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ + -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" +do + CC="$ac_save_CC $ac_arg" + if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_c89=$ac_arg +fi +rm -f core conftest.err conftest.$ac_objext + test "x$ac_cv_prog_cc_c89" != "xno" && break +done +rm -f conftest.$ac_ext +CC=$ac_save_CC + +fi +# AC_CACHE_VAL +case "x$ac_cv_prog_cc_c89" in + x) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +$as_echo "none needed" >&6; } ;; + xno) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +$as_echo "unsupported" >&6; } ;; + *) + CC="$CC $ac_cv_prog_cc_c89" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 +$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; +esac +if test "x$ac_cv_prog_cc_c89" != xno; then : + +fi + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 +$as_echo_n "checking whether $CC understands -c and -o together... " >&6; } +if ${am_cv_prog_cc_c_o+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main () +{ + + ; + return 0; +} +_ACEOF + # Make sure it works both with $CC and with simple cc. + # Following AC_PROG_CC_C_O, we do the test twice because some + # compilers refuse to overwrite an existing .o file with -o, + # though they will create one. + am_cv_prog_cc_c_o=yes + for am_i in 1 2; do + if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 + ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 + ac_status=$? + echo "$as_me:$LINENO: \$? = $ac_status" >&5 + (exit $ac_status); } \ + && test -f conftest2.$ac_objext; then + : OK + else + am_cv_prog_cc_c_o=no + break + fi + done + rm -f core conftest* + unset am_i +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 +$as_echo "$am_cv_prog_cc_c_o" >&6; } +if test "$am_cv_prog_cc_c_o" != yes; then + # Losing compiler, so override with the script. + # FIXME: It is wrong to rewrite CC. + # But if we don't then we get into trouble of one sort or another. + # A longer-term fix would be to have automake use am__CC in this case, + # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" + CC="$am_aux_dir/compile $CC" +fi +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + +depcc="$CC" am_compiler_list= + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 +$as_echo_n "checking dependency style of $depcc... " >&6; } +if ${am_cv_CC_dependencies_compiler_type+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then + # We make a subdir and do the tests there. Otherwise we can end up + # making bogus files that we don't know about and never remove. 
For + # instance it was reported that on HP-UX the gcc test will end up + # making a dummy file named 'D' -- because '-MD' means "put the output + # in D". + rm -rf conftest.dir + mkdir conftest.dir + # Copy depcomp to subdir because otherwise we won't find it if we're + # using a relative directory. + cp "$am_depcomp" conftest.dir + cd conftest.dir + # We will build objects and dependencies in a subdirectory because + # it helps to detect inapplicable dependency modes. For instance + # both Tru64's cc and ICC support -MD to output dependencies as a + # side effect of compilation, but ICC will put the dependencies in + # the current directory while Tru64 will put them in the object + # directory. + mkdir sub + + am_cv_CC_dependencies_compiler_type=none + if test "$am_compiler_list" = ""; then + am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` + fi + am__universal=false + case " $depcc " in #( + *\ -arch\ *\ -arch\ *) am__universal=true ;; + esac + + for depmode in $am_compiler_list; do + # Setup a source with many dependencies, because some compilers + # like to wrap large dependency lists on column 80 (with \), and + # we should not choose a depcomp mode which is confused by this. + # + # We need to recreate these files for each test, as the compiler may + # overwrite some of them when testing with obscure command lines. + # This happens at least with the AIX C compiler. + : > sub/conftest.c + for i in 1 2 3 4 5 6; do + echo '#include "conftst'$i'.h"' >> sub/conftest.c + # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with + # Solaris 10 /bin/sh. + echo '/* dummy */' > sub/conftst$i.h + done + echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf + + # We check with '-c' and '-o' for the sake of the "dashmstdout" + # mode. It turns out that the SunPro C++ compiler does not properly + # handle '-M -o', and we need to detect this. Also, some Intel + # versions had trouble with output in subdirs. 
+ am__obj=sub/conftest.${OBJEXT-o} + am__minus_obj="-o $am__obj" + case $depmode in + gcc) + # This depmode causes a compiler race in universal mode. + test "$am__universal" = false || continue + ;; + nosideeffect) + # After this tag, mechanisms are not by side-effect, so they'll + # only be used when explicitly requested. + if test "x$enable_dependency_tracking" = xyes; then + continue + else + break + fi + ;; + msvc7 | msvc7msys | msvisualcpp | msvcmsys) + # This compiler won't grok '-c -o', but also, the minuso test has + # not run yet. These depmodes are late enough in the game, and + # so weak that their functioning should not be impacted. + am__obj=conftest.${OBJEXT-o} + am__minus_obj= + ;; + none) break ;; + esac + if depmode=$depmode \ + source=sub/conftest.c object=$am__obj \ + depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ + $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ + >/dev/null 2>conftest.err && + grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && + grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && + grep $am__obj sub/conftest.Po > /dev/null 2>&1 && + ${MAKE-make} -s -f confmf > /dev/null 2>&1; then + # icc doesn't choke on unknown options, it will just issue warnings + # or remarks (even with -Werror). So we grep stderr for any message + # that says an option was ignored or not supported. + # When given -MP, icc 7.0 and 7.1 complain thusly: + # icc: Command line warning: ignoring option '-M'; no argument required + # The diagnosis changed in icc 8.0: + # icc: Command line remark: option '-MP' not supported + if (grep 'ignoring option' conftest.err || + grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else + am_cv_CC_dependencies_compiler_type=$depmode + break + fi + fi + done + + cd .. 
+ rm -rf conftest.dir +else + am_cv_CC_dependencies_compiler_type=none +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 +$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } +CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type + + if + test "x$enable_dependency_tracking" != xno \ + && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then + am__fastdepCC_TRUE= + am__fastdepCC_FALSE='#' +else + am__fastdepCC_TRUE='#' + am__fastdepCC_FALSE= +fi + + + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 +$as_echo_n "checking how to run the C preprocessor... " >&6; } +# On Suns, sometimes $CPP names a directory. +if test -n "$CPP" && test -d "$CPP"; then + CPP= +fi +if test -z "$CPP"; then + if ${ac_cv_prog_CPP+:} false; then : + $as_echo_n "(cached) " >&6 +else + # Double quotes because CPP needs to be expanded + for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" + do + ac_preproc_ok=false +for ac_c_preproc_warn_flag in '' yes +do + # Use a header file that comes with gcc, so configuring glibc + # with a fresh cross-compiler works. + # Prefer to if __STDC__ is defined, since + # exists even on freestanding compilers. + # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. "Syntax error" is here to catch this case. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifdef __STDC__ +# include +#else +# include +#endif + Syntax error +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + +else + # Broken: fails on valid input. +continue +fi +rm -f conftest.err conftest.i conftest.$ac_ext + + # OK, works on sane cases. Now check whether nonexistent headers + # can be detected and how. 
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + # Broken: success on invalid input. +continue +else + # Passes both tests. +ac_preproc_ok=: +break +fi +rm -f conftest.err conftest.i conftest.$ac_ext + +done +# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : + break +fi + + done + ac_cv_prog_CPP=$CPP + +fi + CPP=$ac_cv_prog_CPP +else + ac_cv_prog_CPP=$CPP +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 +$as_echo "$CPP" >&6; } +ac_preproc_ok=false +for ac_c_preproc_warn_flag in '' yes +do + # Use a header file that comes with gcc, so configuring glibc + # with a fresh cross-compiler works. + # Prefer to if __STDC__ is defined, since + # exists even on freestanding compilers. + # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. "Syntax error" is here to catch this case. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifdef __STDC__ +# include +#else +# include +#endif + Syntax error +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + +else + # Broken: fails on valid input. +continue +fi +rm -f conftest.err conftest.i conftest.$ac_ext + + # OK, works on sane cases. Now check whether nonexistent headers + # can be detected and how. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + # Broken: success on invalid input. +continue +else + # Passes both tests. +ac_preproc_ok=: +break +fi +rm -f conftest.err conftest.i conftest.$ac_ext + +done +# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : + +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? 
"C preprocessor \"$CPP\" fails sanity check +See \`config.log' for more details" "$LINENO" 5; } +fi + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 +$as_echo_n "checking for grep that handles long lines and -e... " >&6; } +if ${ac_cv_path_GREP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -z "$GREP"; then + ac_path_GREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in grep ggrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_GREP" || continue +# Check for GNU ac_path_GREP and select it if it is found. 
+ # Check for GNU $ac_path_GREP +case `"$ac_path_GREP" --version 2>&1` in +*GNU*) + ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'GREP' >> "conftest.nl" + "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_GREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_GREP="$ac_path_GREP" + ac_path_GREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_GREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_GREP"; then + as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_GREP=$GREP +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 +$as_echo "$ac_cv_path_GREP" >&6; } + GREP="$ac_cv_path_GREP" + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 +$as_echo_n "checking for egrep... " >&6; } +if ${ac_cv_path_EGREP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 + then ac_cv_path_EGREP="$GREP -E" + else + if test -z "$EGREP"; then + ac_path_EGREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_prog in egrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_EGREP" || continue +# Check for GNU ac_path_EGREP and select it if it is found. + # Check for GNU $ac_path_EGREP +case `"$ac_path_EGREP" --version 2>&1` in +*GNU*) + ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'EGREP' >> "conftest.nl" + "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_EGREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_EGREP="$ac_path_EGREP" + ac_path_EGREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_EGREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_EGREP"; then + as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_EGREP=$EGREP +fi + + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 +$as_echo "$ac_cv_path_EGREP" >&6; } + EGREP="$ac_cv_path_EGREP" + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 +$as_echo_n "checking for ANSI C header files... " >&6; } +if ${ac_cv_header_stdc+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +#include +#include +#include +#include + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_header_stdc=yes +else + ac_cv_header_stdc=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + +if test $ac_cv_header_stdc = yes; then + # SunOS 4.x string.h does not declare mem*, contrary to ANSI. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "memchr" >/dev/null 2>&1; then : + +else + ac_cv_header_stdc=no +fi +rm -f conftest* + +fi + +if test $ac_cv_header_stdc = yes; then + # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "free" >/dev/null 2>&1; then : + +else + ac_cv_header_stdc=no +fi +rm -f conftest* + +fi + +if test $ac_cv_header_stdc = yes; then + # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. + if test "$cross_compiling" = yes; then : + : +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +#if ((' ' & 0x0FF) == 0x020) +# define ISLOWER(c) ('a' <= (c) && (c) <= 'z') +# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) +#else +# define ISLOWER(c) \ + (('a' <= (c) && (c) <= 'i') \ + || ('j' <= (c) && (c) <= 'r') \ + || ('s' <= (c) && (c) <= 'z')) +# define TOUPPER(c) (ISLOWER(c) ? 
((c) | 0x40) : (c)) +#endif + +#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) +int +main () +{ + int i; + for (i = 0; i < 256; i++) + if (XOR (islower (i), ISLOWER (i)) + || toupper (i) != TOUPPER (i)) + return 2; + return 0; +} +_ACEOF +if ac_fn_c_try_run "$LINENO"; then : + +else + ac_cv_header_stdc=no +fi +rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ + conftest.$ac_objext conftest.beam conftest.$ac_ext +fi + +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 +$as_echo "$ac_cv_header_stdc" >&6; } +if test $ac_cv_header_stdc = yes; then + +$as_echo "#define STDC_HEADERS 1" >>confdefs.h + +fi + +# On IRIX 5.3, sys/types and inttypes.h are conflicting. +for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ + inttypes.h stdint.h unistd.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default +" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + + +ac_fn_c_check_type "$LINENO" "long long" "ac_cv_type_long_long" "$ac_includes_default" +if test "x$ac_cv_type_long_long" = xyes; then : + +$as_echo "#define HAVE_LONGLONG 1" >>confdefs.h + + longlong="yes" + +fi + + + + # + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if _REENTRANT is already defined" >&5 +$as_echo_n "checking if _REENTRANT is already defined... " >&6; } + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + + + +int +main () +{ + +#ifdef _REENTRANT + int dummy=1; +#else + force compilation error +#endif + + ; + return 0; +} + +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + tmp_reentrant_initially_defined="yes" + +else + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + tmp_reentrant_initially_defined="no" + +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + # + if test "$tmp_reentrant_initially_defined" = "no"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if _REENTRANT is actually needed" >&5 +$as_echo_n "checking if _REENTRANT is actually needed... " >&6; } + + case $host in + *-*-solaris* | *-*-hpux*) + tmp_need_reentrant="yes" + ;; + *) + tmp_need_reentrant="no" + ;; + esac + + + if test "$tmp_need_reentrant" = "yes"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + fi + fi + # + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if _REENTRANT is onwards defined" >&5 +$as_echo_n "checking if _REENTRANT is onwards defined... " >&6; } + if test "$tmp_reentrant_initially_defined" = "yes" || + test "$tmp_need_reentrant" = "yes"; then + + +$as_echo "#define NEED_REENTRANT 1" >>confdefs.h + +cat >>confdefs.h <<_EOF +#ifndef _REENTRANT +# define _REENTRANT +#endif +_EOF + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + fi + # + + +# Some systems (Solaris?) have socket() in -lsocket. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing socket" >&5 +$as_echo_n "checking for library containing socket... 
" >&6; } +if ${ac_cv_search_socket+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char socket (); +int +main () +{ +return socket (); + ; + return 0; +} +_ACEOF +for ac_lib in '' socket; do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO"; then : + ac_cv_search_socket=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext + if ${ac_cv_search_socket+:} false; then : + break +fi +done +if ${ac_cv_search_socket+:} false; then : + +else + ac_cv_search_socket=no +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_socket" >&5 +$as_echo "$ac_cv_search_socket" >&6; } +ac_res=$ac_cv_search_socket +if test "$ac_res" != no; then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + +fi + + +# Solaris has inet_addr() in -lnsl. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing inet_addr" >&5 +$as_echo_n "checking for library containing inet_addr... " >&6; } +if ${ac_cv_search_inet_addr+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char inet_addr (); +int +main () +{ +return inet_addr (); + ; + return 0; +} +_ACEOF +for ac_lib in '' nsl; do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO"; then : + ac_cv_search_inet_addr=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext + if ${ac_cv_search_inet_addr+:} false; then : + break +fi +done +if ${ac_cv_search_inet_addr+:} false; then : + +else + ac_cv_search_inet_addr=no +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_inet_addr" >&5 +$as_echo "$ac_cv_search_inet_addr" >&6; } +ac_res=$ac_cv_search_inet_addr +if test "$ac_res" != no; then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + +fi + + + + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. +set dummy ${ac_tool_prefix}gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_CC"; then + ac_ct_CC=$CC + # Extract the first word of "gcc", so it can be a program name with args. +set dummy gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +else + CC="$ac_cv_prog_CC" +fi + +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. +set dummy ${ac_tool_prefix}cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + fi +fi +if test -z "$CC"; then + # Extract the first word of "cc", so it can be a program name with args. +set dummy cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else + ac_prog_rejected=no +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then + ac_prog_rejected=yes + continue + fi + ac_cv_prog_CC="cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +if test $ac_prog_rejected = yes; then + # We found a bogon in the path, so make sure we never use it. + set dummy $ac_cv_prog_CC + shift + if test $# != 0; then + # We chose a different compiler from the bogus one. + # However, it has the same basename, so the bogon will be chosen + # first if we set CC to just the basename; use the full file name. 
+ shift + ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" + fi +fi +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + for ac_prog in cl.exe + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$CC" && break + done +fi +if test -z "$CC"; then + ac_ct_CC=$CC + for ac_prog in cl.exe +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_CC" && break +done + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +fi + +fi + + +test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "no acceptable C compiler found in \$PATH +See \`config.log' for more details" "$LINENO" 5; } + +# Provide some information about the compiler. +$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... 
+ 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 +$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } +if ${ac_cv_c_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_c_compiler_gnu=$ac_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 +$as_echo "$ac_cv_c_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GCC=yes +else + GCC= +fi +ac_test_CFLAGS=${CFLAGS+set} +ac_save_CFLAGS=$CFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 +$as_echo_n "checking whether $CC accepts -g... " >&6; } +if ${ac_cv_prog_cc_g+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_save_c_werror_flag=$ac_c_werror_flag + ac_c_werror_flag=yes + ac_cv_prog_cc_g=no + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +else + CFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +else + ac_c_werror_flag=$ac_save_c_werror_flag + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_c_werror_flag=$ac_save_c_werror_flag +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 +$as_echo "$ac_cv_prog_cc_g" >&6; } +if test "$ac_test_CFLAGS" = set; then + CFLAGS=$ac_save_CFLAGS +elif test $ac_cv_prog_cc_g = yes; then + if test "$GCC" = yes; then + CFLAGS="-g -O2" + else + CFLAGS="-g" + fi +else + if test "$GCC" = yes; then + CFLAGS="-O2" + else + CFLAGS= + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 +$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } +if ${ac_cv_prog_cc_c89+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_prog_cc_c89=no +ac_save_CC=$CC +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +struct stat; +/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ +struct buf { int x; }; +FILE * (*rcsopen) (struct buf *, struct stat *, int); +static char *e (p, i) + char **p; + int i; +{ + return p[i]; +} +static char *f (char * (*g) (char **, int), char **p, ...) +{ + char *s; + va_list v; + va_start (v,p); + s = g (p, va_arg (v,int)); + va_end (v); + return s; +} + +/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has + function prototypes and stuff, but not '\xHH' hex character constants. + These don't provoke an error unfortunately, instead are silently treated + as 'x'. The following induces an error, until -std is added to get + proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an + array size at least. It's necessary to write '\x00'==0 to get something + that's true only with -std. */ +int osf4_cc_array ['\x00' == 0 ? 
1 : -1]; + +/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters + inside strings and character constants. */ +#define FOO(x) 'x' +int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1]; + +int test (int i, double x); +struct s1 {int (*f) (int a);}; +struct s2 {int (*f) (double a);}; +int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); +int argc; +char **argv; +int +main () +{ +return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; + ; + return 0; +} +_ACEOF +for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ + -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" +do + CC="$ac_save_CC $ac_arg" + if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_c89=$ac_arg +fi +rm -f core conftest.err conftest.$ac_objext + test "x$ac_cv_prog_cc_c89" != "xno" && break +done +rm -f conftest.$ac_ext +CC=$ac_save_CC + +fi +# AC_CACHE_VAL +case "x$ac_cv_prog_cc_c89" in + x) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +$as_echo "none needed" >&6; } ;; + xno) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +$as_echo "unsupported" >&6; } ;; + *) + CC="$CC $ac_cv_prog_cc_c89" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 +$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; +esac +if test "x$ac_cv_prog_cc_c89" != xno; then : + +fi + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 +$as_echo_n "checking whether $CC understands -c and -o together... 
" >&6; } +if ${am_cv_prog_cc_c_o+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF + # Make sure it works both with $CC and with simple cc. + # Following AC_PROG_CC_C_O, we do the test twice because some + # compilers refuse to overwrite an existing .o file with -o, + # though they will create one. + am_cv_prog_cc_c_o=yes + for am_i in 1 2; do + if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 + ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 + ac_status=$? + echo "$as_me:$LINENO: \$? = $ac_status" >&5 + (exit $ac_status); } \ + && test -f conftest2.$ac_objext; then + : OK + else + am_cv_prog_cc_c_o=no + break + fi + done + rm -f core conftest* + unset am_i +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 +$as_echo "$am_cv_prog_cc_c_o" >&6; } +if test "$am_cv_prog_cc_c_o" != yes; then + # Losing compiler, so override with the script. + # FIXME: It is wrong to rewrite CC. + # But if we don't then we get into trouble of one sort or another. + # A longer-term fix would be to have automake use am__CC in this case, + # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" + CC="$am_aux_dir/compile $CC" +fi +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + +depcc="$CC" am_compiler_list= + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 +$as_echo_n "checking dependency style of $depcc... " >&6; } +if ${am_cv_CC_dependencies_compiler_type+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then + # We make a subdir and do the tests there. 
Otherwise we can end up + # making bogus files that we don't know about and never remove. For + # instance it was reported that on HP-UX the gcc test will end up + # making a dummy file named 'D' -- because '-MD' means "put the output + # in D". + rm -rf conftest.dir + mkdir conftest.dir + # Copy depcomp to subdir because otherwise we won't find it if we're + # using a relative directory. + cp "$am_depcomp" conftest.dir + cd conftest.dir + # We will build objects and dependencies in a subdirectory because + # it helps to detect inapplicable dependency modes. For instance + # both Tru64's cc and ICC support -MD to output dependencies as a + # side effect of compilation, but ICC will put the dependencies in + # the current directory while Tru64 will put them in the object + # directory. + mkdir sub + + am_cv_CC_dependencies_compiler_type=none + if test "$am_compiler_list" = ""; then + am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` + fi + am__universal=false + case " $depcc " in #( + *\ -arch\ *\ -arch\ *) am__universal=true ;; + esac + + for depmode in $am_compiler_list; do + # Setup a source with many dependencies, because some compilers + # like to wrap large dependency lists on column 80 (with \), and + # we should not choose a depcomp mode which is confused by this. + # + # We need to recreate these files for each test, as the compiler may + # overwrite some of them when testing with obscure command lines. + # This happens at least with the AIX C compiler. + : > sub/conftest.c + for i in 1 2 3 4 5 6; do + echo '#include "conftst'$i'.h"' >> sub/conftest.c + # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with + # Solaris 10 /bin/sh. + echo '/* dummy */' > sub/conftst$i.h + done + echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf + + # We check with '-c' and '-o' for the sake of the "dashmstdout" + # mode. It turns out that the SunPro C++ compiler does not properly + # handle '-M -o', and we need to detect this. 
Also, some Intel + # versions had trouble with output in subdirs. + am__obj=sub/conftest.${OBJEXT-o} + am__minus_obj="-o $am__obj" + case $depmode in + gcc) + # This depmode causes a compiler race in universal mode. + test "$am__universal" = false || continue + ;; + nosideeffect) + # After this tag, mechanisms are not by side-effect, so they'll + # only be used when explicitly requested. + if test "x$enable_dependency_tracking" = xyes; then + continue + else + break + fi + ;; + msvc7 | msvc7msys | msvisualcpp | msvcmsys) + # This compiler won't grok '-c -o', but also, the minuso test has + # not run yet. These depmodes are late enough in the game, and + # so weak that their functioning should not be impacted. + am__obj=conftest.${OBJEXT-o} + am__minus_obj= + ;; + none) break ;; + esac + if depmode=$depmode \ + source=sub/conftest.c object=$am__obj \ + depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ + $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ + >/dev/null 2>conftest.err && + grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && + grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && + grep $am__obj sub/conftest.Po > /dev/null 2>&1 && + ${MAKE-make} -s -f confmf > /dev/null 2>&1; then + # icc doesn't choke on unknown options, it will just issue warnings + # or remarks (even with -Werror). So we grep stderr for any message + # that says an option was ignored or not supported. + # When given -MP, icc 7.0 and 7.1 complain thusly: + # icc: Command line warning: ignoring option '-M'; no argument required + # The diagnosis changed in icc 8.0: + # icc: Command line remark: option '-MP' not supported + if (grep 'ignoring option' conftest.err || + grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else + am_cv_CC_dependencies_compiler_type=$depmode + break + fi + fi + done + + cd .. 
+ rm -rf conftest.dir +else + am_cv_CC_dependencies_compiler_type=none +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 +$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } +CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type + + if + test "x$enable_dependency_tracking" != xno \ + && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then + am__fastdepCC_TRUE= + am__fastdepCC_FALSE='#' +else + am__fastdepCC_TRUE='#' + am__fastdepCC_FALSE= +fi + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 +$as_echo_n "checking whether ln -s works... " >&6; } +LN_S=$as_ln_s +if test "$LN_S" = "ln -s"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 +$as_echo "no, using $LN_S" >&6; } +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 +$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } +set x ${MAKE-make} +ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` +if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat >conftest.make <<\_ACEOF +SHELL = /bin/sh +all: + @echo '@@@%%%=$(MAKE)=@@@%%%' +_ACEOF +# GNU make sometimes prints "make[1]: Entering ...", which would confuse us. +case `${MAKE-make} -f conftest.make 2>/dev/null` in + *@@@%%%=?*=@@@%%%*) + eval ac_cv_prog_make_${ac_make}_set=yes;; + *) + eval ac_cv_prog_make_${ac_make}_set=no;; +esac +rm -f conftest.make +fi +if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + SET_MAKE= +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + SET_MAKE="MAKE=${MAKE-make}" +fi + +for ac_prog in sshd +do + # Extract the first word of "$ac_prog", so it can be a program name with args. 
+set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_SSHD+:} false; then : + $as_echo_n "(cached) " >&6 +else + case $SSHD in + [\\/]* | ?:[\\/]*) + ac_cv_path_SSHD="$SSHD" # Let the user override the test with a path. + ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/libexec$PATH_SEPARATOR /usr/sbin$PATH_SEPARATOR/usr/etc$PATH_SEPARATOR/etc +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_path_SSHD="$as_dir/$ac_word$ac_exec_ext" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac +fi +SSHD=$ac_cv_path_SSHD +if test -n "$SSHD"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $SSHD" >&5 +$as_echo "$SSHD" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$SSHD" && break +done + + if test -n "$SSHD"; then + SSHD_TRUE= + SSHD_FALSE='#' +else + SSHD_TRUE='#' + SSHD_FALSE= +fi + +enable_win32_dll=yes + +case $host in +*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*) + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}as", so it can be a program name with args. +set dummy ${ac_tool_prefix}as; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_AS+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$AS"; then + ac_cv_prog_AS="$AS" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_AS="${ac_tool_prefix}as" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +AS=$ac_cv_prog_AS +if test -n "$AS"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AS" >&5 +$as_echo "$AS" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_AS"; then + ac_ct_AS=$AS + # Extract the first word of "as", so it can be a program name with args. +set dummy as; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_AS+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_AS"; then + ac_cv_prog_ac_ct_AS="$ac_ct_AS" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_AS="as" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_AS=$ac_cv_prog_ac_ct_AS +if test -n "$ac_ct_AS"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AS" >&5 +$as_echo "$ac_ct_AS" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_AS" = x; then + AS="false" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + AS=$ac_ct_AS + fi +else + AS="$ac_cv_prog_AS" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. +set dummy ${ac_tool_prefix}dlltool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_DLLTOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$DLLTOOL"; then + ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +DLLTOOL=$ac_cv_prog_DLLTOOL +if test -n "$DLLTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 +$as_echo "$DLLTOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_DLLTOOL"; then + ac_ct_DLLTOOL=$DLLTOOL + # Extract the first word of "dlltool", so it can be a program name with args. +set dummy dlltool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_DLLTOOL"; then + ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_DLLTOOL="dlltool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL +if test -n "$ac_ct_DLLTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 +$as_echo "$ac_ct_DLLTOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_DLLTOOL" = x; then + DLLTOOL="false" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + DLLTOOL=$ac_ct_DLLTOOL + fi +else + DLLTOOL="$ac_cv_prog_DLLTOOL" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. +set dummy ${ac_tool_prefix}objdump; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_OBJDUMP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$OBJDUMP"; then + ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +OBJDUMP=$ac_cv_prog_OBJDUMP +if test -n "$OBJDUMP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 +$as_echo "$OBJDUMP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_OBJDUMP"; then + ac_ct_OBJDUMP=$OBJDUMP + # Extract the first word of "objdump", so it can be a program name with args. +set dummy objdump; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_OBJDUMP"; then + ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_OBJDUMP="objdump" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP +if test -n "$ac_ct_OBJDUMP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 +$as_echo "$ac_ct_OBJDUMP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_OBJDUMP" = x; then + OBJDUMP="false" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + OBJDUMP=$ac_ct_OBJDUMP + fi +else + OBJDUMP="$ac_cv_prog_OBJDUMP" +fi + + ;; +esac + +test -z "$AS" && AS=as + + + + + +test -z "$DLLTOOL" && DLLTOOL=dlltool + + + + + +test -z "$OBJDUMP" && OBJDUMP=objdump + + + + + + + +case `pwd` in + *\ * | *\ *) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 +$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; +esac + + + +macro_version='2.4.2' +macro_revision='1.3337' + + + + + + + + + + + + + +ltmain="$ac_aux_dir/ltmain.sh" + +# Backslashify metacharacters that are still active within +# double-quoted strings. +sed_quote_subst='s/\(["`$\\]\)/\\\1/g' + +# Same as above, but do not quote variable references. +double_quote_subst='s/\(["`\\]\)/\\\1/g' + +# Sed substitution to delay expansion of an escaped shell variable in a +# double_quote_subst'ed string. +delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' + +# Sed substitution to delay expansion of an escaped single quote. 
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' + +# Sed substitution to avoid accidental globbing in evaled expressions +no_glob_subst='s/\*/\\\*/g' + +ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO +ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 +$as_echo_n "checking how to print strings... " >&6; } +# Test print first, because it will be a builtin if present. +if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ + test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then + ECHO='print -r --' +elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then + ECHO='printf %s\n' +else + # Use this function as a fallback that always works. + func_fallback_echo () + { + eval 'cat <<_LTECHO_EOF +$1 +_LTECHO_EOF' + } + ECHO='func_fallback_echo' +fi + +# func_echo_all arg... +# Invoke $ECHO with all args, space-separated. +func_echo_all () +{ + $ECHO "" +} + +case "$ECHO" in + printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5 +$as_echo "printf" >&6; } ;; + print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5 +$as_echo "print -r" >&6; } ;; + *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5 +$as_echo "cat" >&6; } ;; +esac + + + + + + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 +$as_echo_n "checking for a sed that does not truncate output... 
" >&6; } +if ${ac_cv_path_SED+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ + for ac_i in 1 2 3 4 5 6 7; do + ac_script="$ac_script$as_nl$ac_script" + done + echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed + { ac_script=; unset ac_script;} + if test -z "$SED"; then + ac_path_SED_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in sed gsed; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_SED" || continue +# Check for GNU ac_path_SED and select it if it is found. + # Check for GNU $ac_path_SED +case `"$ac_path_SED" --version 2>&1` in +*GNU*) + ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo '' >> "conftest.nl" + "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_SED_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_SED="$ac_path_SED" + ac_path_SED_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_SED_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_SED"; then + as_fn_error $? 
"no acceptable sed could be found in \$PATH" "$LINENO" 5 + fi +else + ac_cv_path_SED=$SED +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 +$as_echo "$ac_cv_path_SED" >&6; } + SED="$ac_cv_path_SED" + rm -f conftest.sed + +test -z "$SED" && SED=sed +Xsed="$SED -e 1s/^X//" + + + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5 +$as_echo_n "checking for fgrep... " >&6; } +if ${ac_cv_path_FGREP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1 + then ac_cv_path_FGREP="$GREP -F" + else + if test -z "$FGREP"; then + ac_path_FGREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in fgrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_FGREP" || continue +# Check for GNU ac_path_FGREP and select it if it is found. 
+ # Check for GNU $ac_path_FGREP +case `"$ac_path_FGREP" --version 2>&1` in +*GNU*) + ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'FGREP' >> "conftest.nl" + "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_FGREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_FGREP="$ac_path_FGREP" + ac_path_FGREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_FGREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_FGREP"; then + as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_FGREP=$FGREP +fi + + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5 +$as_echo "$ac_cv_path_FGREP" >&6; } + FGREP="$ac_cv_path_FGREP" + + +test -z "$GREP" && GREP=grep + + + + + + + + + + + + + + + + + + + +# Check whether --with-gnu-ld was given. +if test "${with_gnu_ld+set}" = set; then : + withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes +else + with_gnu_ld=no +fi + +ac_prog=ld +if test "$GCC" = yes; then + # Check if gcc -print-prog-name=ld gives a path. + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 +$as_echo_n "checking for ld used by $CC... 
" >&6; } + case $host in + *-*-mingw*) + # gcc leaves a trailing carriage return which upsets mingw + ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; + *) + ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; + esac + case $ac_prog in + # Accept absolute paths. + [\\/]* | ?:[\\/]*) + re_direlt='/[^/][^/]*/\.\./' + # Canonicalize the pathname of ld + ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` + while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do + ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` + done + test -z "$LD" && LD="$ac_prog" + ;; + "") + # If it fails, then pretend we aren't using GCC. + ac_prog=ld + ;; + *) + # If it is relative, then search for the first ld in PATH. + with_gnu_ld=unknown + ;; + esac +elif test "$with_gnu_ld" = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 +$as_echo_n "checking for GNU ld... " >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 +$as_echo_n "checking for non-GNU ld... " >&6; } +fi +if ${lt_cv_path_LD+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -z "$LD"; then + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + for ac_dir in $PATH; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. + if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then + lt_cv_path_LD="$ac_dir/$ac_prog" + # Check to see if the program is GNU ld. I'd rather use --version, + # but apparently some variants of GNU ld only accept -v. + # Break only if it was the GNU/non-GNU ld that we prefer. + case `"$lt_cv_path_LD" -v 2>&1 &5 +$as_echo "$LD" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi +test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 +$as_echo_n "checking if the linker ($LD) is GNU ld... 
" >&6; } +if ${lt_cv_prog_gnu_ld+:} false; then : + $as_echo_n "(cached) " >&6 +else + # I'd rather use --version here, but apparently some GNU lds only accept -v. +case `$LD -v 2>&1 &5 +$as_echo "$lt_cv_prog_gnu_ld" >&6; } +with_gnu_ld=$lt_cv_prog_gnu_ld + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5 +$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; } +if ${lt_cv_path_NM+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$NM"; then + # Let the user override the test. + lt_cv_path_NM="$NM" +else + lt_nm_to_check="${ac_tool_prefix}nm" + if test -n "$ac_tool_prefix" && test "$build" = "$host"; then + lt_nm_to_check="$lt_nm_to_check nm" + fi + for lt_tmp_nm in $lt_nm_to_check; do + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. + tmp_nm="$ac_dir/$lt_tmp_nm" + if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then + # Check to see if the nm accepts a BSD-compat flag. 
+ # Adding the `sed 1q' prevents false positives on HP-UX, which says: + # nm: unknown option "B" ignored + # Tru64's nm complains that /dev/null is an invalid object file + case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in + */dev/null* | *'Invalid file or object type'*) + lt_cv_path_NM="$tmp_nm -B" + break + ;; + *) + case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in + */dev/null*) + lt_cv_path_NM="$tmp_nm -p" + break + ;; + *) + lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but + continue # so that we can try to find one that supports BSD flags + ;; + esac + ;; + esac + fi + done + IFS="$lt_save_ifs" + done + : ${lt_cv_path_NM=no} +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 +$as_echo "$lt_cv_path_NM" >&6; } +if test "$lt_cv_path_NM" != "no"; then + NM="$lt_cv_path_NM" +else + # Didn't find any BSD compatible name lister, look for dumpbin. + if test -n "$DUMPBIN"; then : + # Let the user override the test. + else + if test -n "$ac_tool_prefix"; then + for ac_prog in dumpbin "link -dump" + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_DUMPBIN+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$DUMPBIN"; then + ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +DUMPBIN=$ac_cv_prog_DUMPBIN +if test -n "$DUMPBIN"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5 +$as_echo "$DUMPBIN" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$DUMPBIN" && break + done +fi +if test -z "$DUMPBIN"; then + ac_ct_DUMPBIN=$DUMPBIN + for ac_prog in dumpbin "link -dump" +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_DUMPBIN"; then + ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN +if test -n "$ac_ct_DUMPBIN"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5 +$as_echo "$ac_ct_DUMPBIN" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_DUMPBIN" && break +done + + if test "x$ac_ct_DUMPBIN" = x; then + DUMPBIN=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + DUMPBIN=$ac_ct_DUMPBIN + fi +fi + + case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in + *COFF*) + DUMPBIN="$DUMPBIN -symbols" + ;; + *) + DUMPBIN=: + ;; + esac + fi + + if test "$DUMPBIN" != ":"; then + NM="$DUMPBIN" + fi +fi +test -z "$NM" && NM=nm + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5 +$as_echo_n "checking the name lister ($NM) interface... 
" >&6; } +if ${lt_cv_nm_interface+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_nm_interface="BSD nm" + echo "int some_variable = 0;" > conftest.$ac_ext + (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5) + (eval "$ac_compile" 2>conftest.err) + cat conftest.err >&5 + (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5) + (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) + cat conftest.err >&5 + (eval echo "\"\$as_me:$LINENO: output\"" >&5) + cat conftest.out >&5 + if $GREP 'External.*some_variable' conftest.out > /dev/null; then + lt_cv_nm_interface="MS dumpbin" + fi + rm -f conftest* +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5 +$as_echo "$lt_cv_nm_interface" >&6; } + +# find the maximum length of command line arguments +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5 +$as_echo_n "checking the maximum length of command line arguments... " >&6; } +if ${lt_cv_sys_max_cmd_len+:} false; then : + $as_echo_n "(cached) " >&6 +else + i=0 + teststring="ABCD" + + case $build_os in + msdosdjgpp*) + # On DJGPP, this test can blow up pretty badly due to problems in libc + # (any single argument exceeding 2000 bytes causes a buffer overrun + # during glob expansion). Even if it were fixed, the result of this + # check would be larger than it should be. + lt_cv_sys_max_cmd_len=12288; # 12K is about right + ;; + + gnu*) + # Under GNU Hurd, this test is not required because there is + # no limit to the length of command line arguments. + # Libtool will interpret -1 as no limit whatsoever + lt_cv_sys_max_cmd_len=-1; + ;; + + cygwin* | mingw* | cegcc*) + # On Win9x/ME, this test blows up -- it succeeds, but takes + # about 5 minutes as the teststring grows exponentially. 
+ # Worse, since 9x/ME are not pre-emptively multitasking, + # you end up with a "frozen" computer, even though with patience + # the test eventually succeeds (with a max line length of 256k). + # Instead, let's just punt: use the minimum linelength reported by + # all of the supported platforms: 8192 (on NT/2K/XP). + lt_cv_sys_max_cmd_len=8192; + ;; + + mint*) + # On MiNT this can take a long time and run out of memory. + lt_cv_sys_max_cmd_len=8192; + ;; + + amigaos*) + # On AmigaOS with pdksh, this test takes hours, literally. + # So we just punt and use a minimum line length of 8192. + lt_cv_sys_max_cmd_len=8192; + ;; + + netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) + # This has been around since 386BSD, at least. Likely further. + if test -x /sbin/sysctl; then + lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` + elif test -x /usr/sbin/sysctl; then + lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` + else + lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs + fi + # And add a safety zone + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` + ;; + + interix*) + # We know the value 262144 and hardcode it with a safety zone (like BSD) + lt_cv_sys_max_cmd_len=196608 + ;; + + os2*) + # The test takes a long time on OS/2. + lt_cv_sys_max_cmd_len=8192 + ;; + + osf*) + # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure + # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not + # nice to cause kernel panics so lets avoid the loop below. + # First set a reasonable default. 
+ lt_cv_sys_max_cmd_len=16384 + # + if test -x /sbin/sysconfig; then + case `/sbin/sysconfig -q proc exec_disable_arg_limit` in + *1*) lt_cv_sys_max_cmd_len=-1 ;; + esac + fi + ;; + sco3.2v5*) + lt_cv_sys_max_cmd_len=102400 + ;; + sysv5* | sco5v6* | sysv4.2uw2*) + kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` + if test -n "$kargmax"; then + lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[ ]//'` + else + lt_cv_sys_max_cmd_len=32768 + fi + ;; + *) + lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` + if test -n "$lt_cv_sys_max_cmd_len" && \ + test undefined != "$lt_cv_sys_max_cmd_len"; then + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` + lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` + else + # Make teststring a little bigger before we do anything with it. + # a 1K string should be a reasonable start. + for i in 1 2 3 4 5 6 7 8 ; do + teststring=$teststring$teststring + done + SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} + # If test is not a shell built-in, we'll probably end up computing a + # maximum length that is only half of the actual maximum length, but + # we can't tell. + while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \ + = "X$teststring$teststring"; } >/dev/null 2>&1 && + test $i != 17 # 1/2 MB should be enough + do + i=`expr $i + 1` + teststring=$teststring$teststring + done + # Only check the string length outside the loop. + lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` + teststring= + # Add a significant safety factor because C++ compilers can tack on + # massive amounts of additional arguments before passing them to the + # linker. It appears as though 1/2 is a usable value. 
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` + fi + ;; + esac + +fi + +if test -n $lt_cv_sys_max_cmd_len ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 +$as_echo "$lt_cv_sys_max_cmd_len" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5 +$as_echo "none" >&6; } +fi +max_cmd_len=$lt_cv_sys_max_cmd_len + + + + + + +: ${CP="cp -f"} +: ${MV="mv -f"} +: ${RM="rm -f"} + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5 +$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; } +# Try some XSI features +xsi_shell=no +( _lt_dummy="a/b/c" + test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ + = c,a/b,b/c, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5 +$as_echo "$xsi_shell" >&6; } + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5 +$as_echo_n "checking whether the shell understands \"+=\"... 
" >&6; } +lt_shell_append=no +( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \ + >/dev/null 2>&1 \ + && lt_shell_append=yes +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5 +$as_echo "$lt_shell_append" >&6; } + + +if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then + lt_unset=unset +else + lt_unset=false +fi + + + + + +# test EBCDIC or ASCII +case `echo X|tr X '\101'` in + A) # ASCII based system + # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr + lt_SP2NL='tr \040 \012' + lt_NL2SP='tr \015\012 \040\040' + ;; + *) # EBCDIC based system + lt_SP2NL='tr \100 \n' + lt_NL2SP='tr \r\n \100\100' + ;; +esac + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 +$as_echo_n "checking how to convert $build file names to $host format... " >&6; } +if ${lt_cv_to_host_file_cmd+:} false; then : + $as_echo_n "(cached) " >&6 +else + case $host in + *-*-mingw* ) + case $build in + *-*-mingw* ) # actually msys + lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 + ;; + *-*-cygwin* ) + lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 + ;; + * ) # otherwise, assume *nix + lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 + ;; + esac + ;; + *-*-cygwin* ) + case $build in + *-*-mingw* ) # actually msys + lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin + ;; + *-*-cygwin* ) + lt_cv_to_host_file_cmd=func_convert_file_noop + ;; + * ) # otherwise, assume *nix + lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin + ;; + esac + ;; + * ) # unhandled hosts (and "normal" native builds) + lt_cv_to_host_file_cmd=func_convert_file_noop + ;; +esac + +fi + +to_host_file_cmd=$lt_cv_to_host_file_cmd +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 +$as_echo "$lt_cv_to_host_file_cmd" >&6; } + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 +$as_echo_n "checking 
how to convert $build file names to toolchain format... " >&6; } +if ${lt_cv_to_tool_file_cmd+:} false; then : + $as_echo_n "(cached) " >&6 +else + #assume ordinary cross tools, or native build. +lt_cv_to_tool_file_cmd=func_convert_file_noop +case $host in + *-*-mingw* ) + case $build in + *-*-mingw* ) # actually msys + lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 + ;; + esac + ;; +esac + +fi + +to_tool_file_cmd=$lt_cv_to_tool_file_cmd +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 +$as_echo "$lt_cv_to_tool_file_cmd" >&6; } + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 +$as_echo_n "checking for $LD option to reload object files... " >&6; } +if ${lt_cv_ld_reload_flag+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_ld_reload_flag='-r' +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5 +$as_echo "$lt_cv_ld_reload_flag" >&6; } +reload_flag=$lt_cv_ld_reload_flag +case $reload_flag in +"" | " "*) ;; +*) reload_flag=" $reload_flag" ;; +esac +reload_cmds='$LD$reload_flag -o $output$reload_objs' +case $host_os in + cygwin* | mingw* | pw32* | cegcc*) + if test "$GCC" != yes; then + reload_cmds=false + fi + ;; + darwin*) + if test "$GCC" = yes; then + reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' + else + reload_cmds='$LD$reload_flag -o $output$reload_objs' + fi + ;; +esac + + + + + + + + + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. +set dummy ${ac_tool_prefix}objdump; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_OBJDUMP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$OBJDUMP"; then + ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +OBJDUMP=$ac_cv_prog_OBJDUMP +if test -n "$OBJDUMP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 +$as_echo "$OBJDUMP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_OBJDUMP"; then + ac_ct_OBJDUMP=$OBJDUMP + # Extract the first word of "objdump", so it can be a program name with args. +set dummy objdump; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_OBJDUMP"; then + ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_OBJDUMP="objdump" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP +if test -n "$ac_ct_OBJDUMP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 +$as_echo "$ac_ct_OBJDUMP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_OBJDUMP" = x; then + OBJDUMP="false" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + OBJDUMP=$ac_ct_OBJDUMP + fi +else + OBJDUMP="$ac_cv_prog_OBJDUMP" +fi + +test -z "$OBJDUMP" && OBJDUMP=objdump + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5 +$as_echo_n "checking how to recognize dependent libraries... " >&6; } +if ${lt_cv_deplibs_check_method+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_file_magic_cmd='$MAGIC_CMD' +lt_cv_file_magic_test_file= +lt_cv_deplibs_check_method='unknown' +# Need to set the preceding variable on all platforms that support +# interlibrary dependencies. +# 'none' -- dependencies not supported. +# `unknown' -- same as none, but documents that we really don't know. +# 'pass_all' -- all dependencies passed with no checks. +# 'test_compile' -- check by making test program. +# 'file_magic [[regex]]' -- check by looking for files in library path +# which responds to the $file_magic_cmd with a given extended regex. +# If you have `file' or equivalent on your system and you're not sure +# whether `pass_all' will *always* work, you probably want this one. 
+ +case $host_os in +aix[4-9]*) + lt_cv_deplibs_check_method=pass_all + ;; + +beos*) + lt_cv_deplibs_check_method=pass_all + ;; + +bsdi[45]*) + lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)' + lt_cv_file_magic_cmd='/usr/bin/file -L' + lt_cv_file_magic_test_file=/shlib/libc.so + ;; + +cygwin*) + # func_win32_libid is a shell function defined in ltmain.sh + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + ;; + +mingw* | pw32*) + # Base MSYS/MinGW do not provide the 'file' command needed by + # func_win32_libid shell function, so use a weaker test based on 'objdump', + # unless we find 'file', for example because we are cross-compiling. + # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin. + if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else + # Keep this pattern in sync with the one in func_win32_libid. + lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; + +cegcc*) + # use the weaker test based on 'objdump'. See mingw*. + lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' + lt_cv_file_magic_cmd='$OBJDUMP -f' + ;; + +darwin* | rhapsody*) + lt_cv_deplibs_check_method=pass_all + ;; + +freebsd* | dragonfly*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then + case $host_cpu in + i*86 ) + # Not sure whether the presence of OpenBSD here was a mistake. + # Let's accept both of them until this is cleared up. 
+ lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library' + lt_cv_file_magic_cmd=/usr/bin/file + lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` + ;; + esac + else + lt_cv_deplibs_check_method=pass_all + fi + ;; + +haiku*) + lt_cv_deplibs_check_method=pass_all + ;; + +hpux10.20* | hpux11*) + lt_cv_file_magic_cmd=/usr/bin/file + case $host_cpu in + ia64*) + lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64' + lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so + ;; + hppa*64*) + lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]' + lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl + ;; + *) + lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library' + lt_cv_file_magic_test_file=/usr/lib/libc.sl + ;; + esac + ;; + +interix[3-9]*) + # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$' + ;; + +irix5* | irix6* | nonstopux*) + case $LD in + *-32|*"-32 ") libmagic=32-bit;; + *-n32|*"-n32 ") libmagic=N32;; + *-64|*"-64 ") libmagic=64-bit;; + *) libmagic=never-match;; + esac + lt_cv_deplibs_check_method=pass_all + ;; + +# This must be glibc/ELF. 
+linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) + lt_cv_deplibs_check_method=pass_all + ;; + +netbsd* | netbsdelf*-gnu) + if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' + else + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$' + fi + ;; + +newos6*) + lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)' + lt_cv_file_magic_cmd=/usr/bin/file + lt_cv_file_magic_test_file=/usr/lib/libnls.so + ;; + +*nto* | *qnx*) + lt_cv_deplibs_check_method=pass_all + ;; + +openbsd*) + if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' + else + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' + fi + ;; + +osf3* | osf4* | osf5*) + lt_cv_deplibs_check_method=pass_all + ;; + +rdos*) + lt_cv_deplibs_check_method=pass_all + ;; + +solaris*) + lt_cv_deplibs_check_method=pass_all + ;; + +sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) + lt_cv_deplibs_check_method=pass_all + ;; + +sysv4 | sysv4.3*) + case $host_vendor in + motorola) + lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]' + lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` + ;; + ncr) + lt_cv_deplibs_check_method=pass_all + ;; + sequent) + lt_cv_file_magic_cmd='/bin/file' + lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' + ;; + sni) + lt_cv_file_magic_cmd='/bin/file' + lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib" + lt_cv_file_magic_test_file=/lib/libc.so + ;; + siemens) + lt_cv_deplibs_check_method=pass_all + ;; + pc) + lt_cv_deplibs_check_method=pass_all + ;; + esac + ;; + +tpf*) + lt_cv_deplibs_check_method=pass_all + 
;; +esac + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 +$as_echo "$lt_cv_deplibs_check_method" >&6; } + +file_magic_glob= +want_nocaseglob=no +if test "$build" = "$host"; then + case $host_os in + mingw* | pw32*) + if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then + want_nocaseglob=yes + else + file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` + fi + ;; + esac +fi + +file_magic_cmd=$lt_cv_file_magic_cmd +deplibs_check_method=$lt_cv_deplibs_check_method +test -z "$deplibs_check_method" && deplibs_check_method=unknown + + + + + + + + + + + + + + + + + + + + + + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. +set dummy ${ac_tool_prefix}dlltool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_DLLTOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$DLLTOOL"; then + ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +DLLTOOL=$ac_cv_prog_DLLTOOL +if test -n "$DLLTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 +$as_echo "$DLLTOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_DLLTOOL"; then + ac_ct_DLLTOOL=$DLLTOOL + # Extract the first word of "dlltool", so it can be a program name with args. 
+set dummy dlltool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_DLLTOOL"; then + ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_DLLTOOL="dlltool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL +if test -n "$ac_ct_DLLTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 +$as_echo "$ac_ct_DLLTOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_DLLTOOL" = x; then + DLLTOOL="false" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + DLLTOOL=$ac_ct_DLLTOOL + fi +else + DLLTOOL="$ac_cv_prog_DLLTOOL" +fi + +test -z "$DLLTOOL" && DLLTOOL=dlltool + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 +$as_echo_n "checking how to associate runtime and link libraries... 
" >&6; } +if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_sharedlib_from_linklib_cmd='unknown' + +case $host_os in +cygwin* | mingw* | pw32* | cegcc*) + # two different shell functions defined in ltmain.sh + # decide which to use based on capabilities of $DLLTOOL + case `$DLLTOOL --help 2>&1` in + *--identify-strict*) + lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib + ;; + *) + lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback + ;; + esac + ;; +*) + # fallback: assume linklib IS sharedlib + lt_cv_sharedlib_from_linklib_cmd="$ECHO" + ;; +esac + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 +$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } +sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd +test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO + + + + + + + +if test -n "$ac_tool_prefix"; then + for ac_prog in ar + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_AR+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$AR"; then + ac_cv_prog_AR="$AR" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_AR="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +AR=$ac_cv_prog_AR +if test -n "$AR"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 +$as_echo "$AR" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$AR" && break + done +fi +if test -z "$AR"; then + ac_ct_AR=$AR + for ac_prog in ar +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_AR+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_AR"; then + ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_AR="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_AR=$ac_cv_prog_ac_ct_AR +if test -n "$ac_ct_AR"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 +$as_echo "$ac_ct_AR" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_AR" && break +done + + if test "x$ac_ct_AR" = x; then + AR="false" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + AR=$ac_ct_AR + fi +fi + +: ${AR=ar} +: ${AR_FLAGS=cru} + + + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 +$as_echo_n "checking for archiver @FILE support... " >&6; } +if ${lt_cv_ar_at_file+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_ar_at_file=no + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + echo conftest.$ac_objext > conftest.lst + lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' + { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 + (eval $lt_ar_try) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } + if test "$ac_status" -eq 0; then + # Ensure the archiver fails upon bogus file names. + rm -f conftest.$ac_objext libconftest.a + { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 + (eval $lt_ar_try) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } + if test "$ac_status" -ne 0; then + lt_cv_ar_at_file=@ + fi + fi + rm -f conftest.* libconftest.a + +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 +$as_echo "$lt_cv_ar_at_file" >&6; } + +if test "x$lt_cv_ar_at_file" = xno; then + archiver_list_spec= +else + archiver_list_spec=$lt_cv_ar_at_file +fi + + + + + + + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. +set dummy ${ac_tool_prefix}strip; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_STRIP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$STRIP"; then + ac_cv_prog_STRIP="$STRIP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_STRIP="${ac_tool_prefix}strip" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +STRIP=$ac_cv_prog_STRIP +if test -n "$STRIP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 +$as_echo "$STRIP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_STRIP"; then + ac_ct_STRIP=$STRIP + # Extract the first word of "strip", so it can be a program name with args. +set dummy strip; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_prog_ac_ct_STRIP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_STRIP"; then + ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_STRIP="strip" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP +if test -n "$ac_ct_STRIP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 +$as_echo "$ac_ct_STRIP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_STRIP" = x; then + STRIP=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + STRIP=$ac_ct_STRIP + fi +else + STRIP="$ac_cv_prog_STRIP" +fi + +test -z "$STRIP" && STRIP=: + + + + + + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. +set dummy ${ac_tool_prefix}ranlib; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_RANLIB+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$RANLIB"; then + ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +RANLIB=$ac_cv_prog_RANLIB +if test -n "$RANLIB"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 +$as_echo "$RANLIB" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_RANLIB"; then + ac_ct_RANLIB=$RANLIB + # Extract the first word of "ranlib", so it can be a program name with args. +set dummy ranlib; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_RANLIB+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_RANLIB"; then + ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_RANLIB="ranlib" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB +if test -n "$ac_ct_RANLIB"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 +$as_echo "$ac_ct_RANLIB" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_RANLIB" = x; then + RANLIB=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + RANLIB=$ac_ct_RANLIB + fi +else + RANLIB="$ac_cv_prog_RANLIB" +fi + +test -z "$RANLIB" && RANLIB=: + + + + + + +# Determine commands to create old-style static archives. +old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' +old_postinstall_cmds='chmod 644 $oldlib' +old_postuninstall_cmds= + +if test -n "$RANLIB"; then + case $host_os in + openbsd*) + old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" + ;; + *) + old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib" + ;; + esac + old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib" +fi + +case $host_os in + darwin*) + lock_old_archive_extraction=yes ;; + *) + lock_old_archive_extraction=no ;; +esac + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +# If no C compiler was specified, use CC. +LTCC=${LTCC-"$CC"} + +# If no C compiler flags were specified, use CFLAGS. +LTCFLAGS=${LTCFLAGS-"$CFLAGS"} + +# Allow CC to be a program name with arguments. +compiler=$CC + + +# Check for command to grab the raw symbol name followed by C symbol from nm. 
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5 +$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; } +if ${lt_cv_sys_global_symbol_pipe+:} false; then : + $as_echo_n "(cached) " >&6 +else + +# These are sane defaults that work on at least a few old systems. +# [They come from Ultrix. What could be older than Ultrix?!! ;)] + +# Character class describing NM global symbol codes. +symcode='[BCDEGRST]' + +# Regexp to match symbols that can be accessed directly from C. +sympat='\([_A-Za-z][_A-Za-z0-9]*\)' + +# Define system-specific variables. +case $host_os in +aix*) + symcode='[BCDT]' + ;; +cygwin* | mingw* | pw32* | cegcc*) + symcode='[ABCDGISTW]' + ;; +hpux*) + if test "$host_cpu" = ia64; then + symcode='[ABCDEGRST]' + fi + ;; +irix* | nonstopux*) + symcode='[BCDEGRST]' + ;; +osf*) + symcode='[BCDEGQRST]' + ;; +solaris*) + symcode='[BDRT]' + ;; +sco3.2v5*) + symcode='[DT]' + ;; +sysv4.2uw2*) + symcode='[DT]' + ;; +sysv5* | sco5v6* | unixware* | OpenUNIX*) + symcode='[ABDT]' + ;; +sysv4) + symcode='[DFNSTU]' + ;; +esac + +# If we're using GNU nm, then use its standard symbol codes. +case `$NM -V 2>&1` in +*GNU* | *'with BFD'*) + symcode='[ABCDGIRSTW]' ;; +esac + +# Transform an extracted symbol line into a proper C declaration. +# Some systems (esp. on ia64) link data and code symbols differently, +# so use this general approach. 
+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + +# Transform an extracted symbol line into symbol name and symbol address +lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" +lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + +# Handle CRLF in mingw tool chain +opt_cr= +case $build_os in +mingw*) + opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp + ;; +esac + +# Try without a prefix underscore, then with it. +for ac_symprfx in "" "_"; do + + # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. + symxfrm="\\1 $ac_symprfx\\2 \\2" + + # Write the raw and C identifiers. + if test "$lt_cv_nm_interface" = "MS dumpbin"; then + # Fake it for dumpbin and say T for any non-static function + # and D for any global variable. + # Also find C++ and __fastcall symbols from MSVC++, + # which start with @ or ?. + lt_cv_sys_global_symbol_pipe="$AWK '"\ +" {last_section=section; section=\$ 3};"\ +" /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ +" /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ +" \$ 0!~/External *\|/{next};"\ +" / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ +" {if(hide[section]) next};"\ +" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? 
\"T \" : \"D \"};"\ +" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ +" s[1]~/^[@?]/{print s[1], s[1]; next};"\ +" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ +" ' prfx=^$ac_symprfx" + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi + lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" + + # Check to see that the pipe works correctly. + pipe_works=no + + rm -f conftest* + cat > conftest.$ac_ext <<_LT_EOF +#ifdef __cplusplus +extern "C" { +#endif +char nm_test_var; +void nm_test_func(void); +void nm_test_func(void){} +#ifdef __cplusplus +} +#endif +int main(){nm_test_var='a';nm_test_func();return(0);} +_LT_EOF + + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + # Now try to grab the symbols. + nlist=conftest.nm + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5 + (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s "$nlist"; then + # Try sorting and uniquifying the output. + if sort "$nlist" | uniq > "$nlist"T; then + mv -f "$nlist"T "$nlist" + else + rm -f "$nlist"T + fi + + # Make sure that we snagged all the symbols we need. + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext +/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. 
*/ +#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) +/* DATA imports from DLLs on WIN32 con't be const, because runtime + relocations are performed -- see ld's documentation on pseudo-relocs. */ +# define LT_DLSYM_CONST +#elif defined(__osf__) +/* This system does not cope well with relocations in const data. */ +# define LT_DLSYM_CONST +#else +# define LT_DLSYM_CONST const +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +_LT_EOF + # Now generate the symbol file. + eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' + + cat <<_LT_EOF >> conftest.$ac_ext + +/* The mapping between symbol names and symbols. */ +LT_DLSYM_CONST struct { + const char *name; + void *address; +} +lt__PROGRAM__LTX_preloaded_symbols[] = +{ + { "@PROGRAM@", (void *) 0 }, +_LT_EOF + $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext + cat <<\_LT_EOF >> conftest.$ac_ext + {0, (void *) 0} +}; + +/* This works around a problem in FreeBSD linker */ +#ifdef FREEBSD_WORKAROUND +static const void *lt_preloaded_setup() { + return lt__PROGRAM__LTX_preloaded_symbols; +} +#endif + +#ifdef __cplusplus +} +#endif +_LT_EOF + # Now try linking the two files. + mv conftest.$ac_objext conftstm.$ac_objext + lt_globsym_save_LIBS=$LIBS + lt_globsym_save_CFLAGS=$CFLAGS + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 + (eval $ac_link) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } && test -s conftest${ac_exeext}; then + pipe_works=yes + fi + LIBS=$lt_globsym_save_LIBS + CFLAGS=$lt_globsym_save_CFLAGS + else + echo "cannot find nm_test_func in $nlist" >&5 + fi + else + echo "cannot find nm_test_var in $nlist" >&5 + fi + else + echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5 + fi + else + echo "$progname: failed program was:" >&5 + cat conftest.$ac_ext >&5 + fi + rm -rf conftest* conftst* + + # Do not use the global_symbol_pipe unless it works. + if test "$pipe_works" = yes; then + break + else + lt_cv_sys_global_symbol_pipe= + fi +done + +fi + +if test -z "$lt_cv_sys_global_symbol_pipe"; then + lt_cv_sys_global_symbol_to_cdecl= +fi +if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 +$as_echo "failed" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 +$as_echo "ok" >&6; } +fi + +# Response file support. +if test "$lt_cv_nm_interface" = "MS dumpbin"; then + nm_file_list_spec='@' +elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then + nm_file_list_spec='@' +fi + + + + + + + + + + + + + + + + + + + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 +$as_echo_n "checking for sysroot... " >&6; } + +# Check whether --with-sysroot was given. +if test "${with_sysroot+set}" = set; then : + withval=$with_sysroot; +else + with_sysroot=no +fi + + +lt_sysroot= +case ${with_sysroot} in #( + yes) + if test "$GCC" = yes; then + lt_sysroot=`$CC --print-sysroot 2>/dev/null` + fi + ;; #( + /*) + lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` + ;; #( + no|'') + ;; #( + *) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5 +$as_echo "${with_sysroot}" >&6; } + as_fn_error $? "The sysroot must be an absolute path." 
"$LINENO" 5 + ;; +esac + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 +$as_echo "${lt_sysroot:-no}" >&6; } + + + + + +# Check whether --enable-libtool-lock was given. +if test "${enable_libtool_lock+set}" = set; then : + enableval=$enable_libtool_lock; +fi + +test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes + +# Some flags need to be propagated to the compiler or linker for good +# libtool support. +case $host in +ia64-*-hpux*) + # Find out which ABI we are using. + echo 'int i;' > conftest.$ac_ext + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + case `/usr/bin/file conftest.$ac_objext` in + *ELF-32*) + HPUX_IA64_MODE="32" + ;; + *ELF-64*) + HPUX_IA64_MODE="64" + ;; + esac + fi + rm -rf conftest* + ;; +*-*-irix6*) + # Find out which ABI we are using. + echo '#line '$LINENO' "configure"' > conftest.$ac_ext + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + if test "$lt_cv_prog_gnu_ld" = yes; then + case `/usr/bin/file conftest.$ac_objext` in + *32-bit*) + LD="${LD-ld} -melf32bsmip" + ;; + *N32*) + LD="${LD-ld} -melf32bmipn32" + ;; + *64-bit*) + LD="${LD-ld} -melf64bmip" + ;; + esac + else + case `/usr/bin/file conftest.$ac_objext` in + *32-bit*) + LD="${LD-ld} -32" + ;; + *N32*) + LD="${LD-ld} -n32" + ;; + *64-bit*) + LD="${LD-ld} -64" + ;; + esac + fi + fi + rm -rf conftest* + ;; + +x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ +s390*-*linux*|s390*-*tpf*|sparc*-*linux*) + # Find out which ABI we are using. + echo 'int i;' > conftest.$ac_ext + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? 
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + case `/usr/bin/file conftest.o` in + *32-bit*) + case $host in + x86_64-*kfreebsd*-gnu) + LD="${LD-ld} -m elf_i386_fbsd" + ;; + x86_64-*linux*) + case `/usr/bin/file conftest.o` in + *x86-64*) + LD="${LD-ld} -m elf32_x86_64" + ;; + *) + LD="${LD-ld} -m elf_i386" + ;; + esac + ;; + powerpc64le-*) + LD="${LD-ld} -m elf32lppclinux" + ;; + powerpc64-*) + LD="${LD-ld} -m elf32ppclinux" + ;; + s390x-*linux*) + LD="${LD-ld} -m elf_s390" + ;; + sparc64-*linux*) + LD="${LD-ld} -m elf32_sparc" + ;; + esac + ;; + *64-bit*) + case $host in + x86_64-*kfreebsd*-gnu) + LD="${LD-ld} -m elf_x86_64_fbsd" + ;; + x86_64-*linux*) + LD="${LD-ld} -m elf_x86_64" + ;; + powerpcle-*) + LD="${LD-ld} -m elf64lppc" + ;; + powerpc-*) + LD="${LD-ld} -m elf64ppc" + ;; + s390*-*linux*|s390*-*tpf*) + LD="${LD-ld} -m elf64_s390" + ;; + sparc*-*linux*) + LD="${LD-ld} -m elf64_sparc" + ;; + esac + ;; + esac + fi + rm -rf conftest* + ;; + +*-*-sco3.2v5*) + # On SCO OpenServer 5, we need -belf to get full-featured binaries. + SAVE_CFLAGS="$CFLAGS" + CFLAGS="$CFLAGS -belf" + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 +$as_echo_n "checking whether the C compiler needs -belf... " >&6; } +if ${lt_cv_cc_needs_belf+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + lt_cv_cc_needs_belf=yes +else + lt_cv_cc_needs_belf=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 +$as_echo "$lt_cv_cc_needs_belf" >&6; } + if test x"$lt_cv_cc_needs_belf" != x"yes"; then + # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf + CFLAGS="$SAVE_CFLAGS" + fi + ;; +*-*solaris*) + # Find out which ABI we are using. + echo 'int i;' > conftest.$ac_ext + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + case `/usr/bin/file conftest.o` in + *64-bit*) + case $lt_cv_prog_gnu_ld in + yes*) + case $host in + i?86-*-solaris*) + LD="${LD-ld} -m elf_x86_64" + ;; + sparc*-*-solaris*) + LD="${LD-ld} -m elf64_sparc" + ;; + esac + # GNU ld 2.21 introduced _sol2 emulations. Use them if available. + if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then + LD="${LD-ld}_sol2" + fi + ;; + *) + if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then + LD="${LD-ld} -64" + fi + ;; + esac + ;; + esac + fi + rm -rf conftest* + ;; +esac + +need_locks="$enable_libtool_lock" + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. +set dummy ${ac_tool_prefix}mt; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_prog_MANIFEST_TOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$MANIFEST_TOOL"; then + ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL +if test -n "$MANIFEST_TOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 +$as_echo "$MANIFEST_TOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_MANIFEST_TOOL"; then + ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL + # Extract the first word of "mt", so it can be a program name with args. +set dummy mt; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_MANIFEST_TOOL"; then + ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL +if test -n "$ac_ct_MANIFEST_TOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 +$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_MANIFEST_TOOL" = x; then + MANIFEST_TOOL=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL + fi +else + MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" +fi + +test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 +$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } +if ${lt_cv_path_mainfest_tool+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_path_mainfest_tool=no + echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 + $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out + cat conftest.err >&5 + if $GREP 'Manifest Tool' conftest.out > /dev/null; then + lt_cv_path_mainfest_tool=yes + fi + rm -f conftest* +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 +$as_echo "$lt_cv_path_mainfest_tool" >&6; } +if test "x$lt_cv_path_mainfest_tool" != xyes; then + MANIFEST_TOOL=: +fi + + + + + + + case $host_os in + rhapsody* | darwin*) + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. 
+set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_DSYMUTIL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$DSYMUTIL"; then + ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +DSYMUTIL=$ac_cv_prog_DSYMUTIL +if test -n "$DSYMUTIL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5 +$as_echo "$DSYMUTIL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_DSYMUTIL"; then + ac_ct_DSYMUTIL=$DSYMUTIL + # Extract the first word of "dsymutil", so it can be a program name with args. +set dummy dsymutil; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_DSYMUTIL"; then + ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL +if test -n "$ac_ct_DSYMUTIL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5 +$as_echo "$ac_ct_DSYMUTIL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_DSYMUTIL" = x; then + DSYMUTIL=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + DSYMUTIL=$ac_ct_DSYMUTIL + fi +else + DSYMUTIL="$ac_cv_prog_DSYMUTIL" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. +set dummy ${ac_tool_prefix}nmedit; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_NMEDIT+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$NMEDIT"; then + ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +NMEDIT=$ac_cv_prog_NMEDIT +if test -n "$NMEDIT"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5 +$as_echo "$NMEDIT" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_NMEDIT"; then + ac_ct_NMEDIT=$NMEDIT + # Extract the first word of "nmedit", so it can be a program name with args. +set dummy nmedit; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_NMEDIT"; then + ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_NMEDIT="nmedit" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT +if test -n "$ac_ct_NMEDIT"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5 +$as_echo "$ac_ct_NMEDIT" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_NMEDIT" = x; then + NMEDIT=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + NMEDIT=$ac_ct_NMEDIT + fi +else + NMEDIT="$ac_cv_prog_NMEDIT" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. +set dummy ${ac_tool_prefix}lipo; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_LIPO+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$LIPO"; then + ac_cv_prog_LIPO="$LIPO" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_LIPO="${ac_tool_prefix}lipo" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +LIPO=$ac_cv_prog_LIPO +if test -n "$LIPO"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5 +$as_echo "$LIPO" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_LIPO"; then + ac_ct_LIPO=$LIPO + # Extract the first word of "lipo", so it can be a program name with args. +set dummy lipo; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_LIPO+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_LIPO"; then + ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_LIPO="lipo" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO +if test -n "$ac_ct_LIPO"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5 +$as_echo "$ac_ct_LIPO" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_LIPO" = x; then + LIPO=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + LIPO=$ac_ct_LIPO + fi +else + LIPO="$ac_cv_prog_LIPO" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. +set dummy ${ac_tool_prefix}otool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_OTOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$OTOOL"; then + ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_OTOOL="${ac_tool_prefix}otool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +OTOOL=$ac_cv_prog_OTOOL +if test -n "$OTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5 +$as_echo "$OTOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_OTOOL"; then + ac_ct_OTOOL=$OTOOL + # Extract the first word of "otool", so it can be a program name with args. +set dummy otool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_OTOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_OTOOL"; then + ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_OTOOL="otool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL +if test -n "$ac_ct_OTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5 +$as_echo "$ac_ct_OTOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_OTOOL" = x; then + OTOOL=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + OTOOL=$ac_ct_OTOOL + fi +else + OTOOL="$ac_cv_prog_OTOOL" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. +set dummy ${ac_tool_prefix}otool64; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_OTOOL64+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$OTOOL64"; then + ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +OTOOL64=$ac_cv_prog_OTOOL64 +if test -n "$OTOOL64"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5 +$as_echo "$OTOOL64" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_OTOOL64"; then + ac_ct_OTOOL64=$OTOOL64 + # Extract the first word of "otool64", so it can be a program name with args. +set dummy otool64; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_OTOOL64"; then + ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_OTOOL64="otool64" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 +if test -n "$ac_ct_OTOOL64"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5 +$as_echo "$ac_ct_OTOOL64" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_OTOOL64" = x; then + OTOOL64=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + OTOOL64=$ac_ct_OTOOL64 + fi +else + OTOOL64="$ac_cv_prog_OTOOL64" +fi + + + + + + + + + + + + + + + + + + + + + + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5 +$as_echo_n "checking for -single_module linker flag... " >&6; } +if ${lt_cv_apple_cc_single_mod+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_apple_cc_single_mod=no + if test -z "${LT_MULTI_MODULE}"; then + # By default we will add the -single_module flag. You can override + # by either setting the environment variable LT_MULTI_MODULE + # non-empty at configure time, or by adding -multi_module to the + # link flags. + rm -rf libconftest.dylib* + echo "int foo(void){return 1;}" > conftest.c + echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ +-dynamiclib -Wl,-single_module conftest.c" >&5 + $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ + -dynamiclib -Wl,-single_module conftest.c 2>conftest.err + _lt_result=$? 
+ # If there is a non-empty error log, and "single_module" + # appears in it, assume the flag caused a linker warning + if test -s conftest.err && $GREP single_module conftest.err; then + cat conftest.err >&5 + # Otherwise, if the output was created with a 0 exit code from + # the compiler, it worked. + elif test -f libconftest.dylib && test $_lt_result -eq 0; then + lt_cv_apple_cc_single_mod=yes + else + cat conftest.err >&5 + fi + rm -rf libconftest.dylib* + rm -f conftest.* + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5 +$as_echo "$lt_cv_apple_cc_single_mod" >&6; } + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5 +$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; } +if ${lt_cv_ld_exported_symbols_list+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_ld_exported_symbols_list=no + save_LDFLAGS=$LDFLAGS + echo "_main" > conftest.sym + LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + lt_cv_ld_exported_symbols_list=yes +else + lt_cv_ld_exported_symbols_list=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LDFLAGS="$save_LDFLAGS" + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 +$as_echo "$lt_cv_ld_exported_symbols_list" >&6; } + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5 +$as_echo_n "checking for -force_load linker flag... 
" >&6; } +if ${lt_cv_ld_force_load+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_ld_force_load=no + cat > conftest.c << _LT_EOF +int forced_loaded() { return 2;} +_LT_EOF + echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5 + $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 + echo "$AR cru libconftest.a conftest.o" >&5 + $AR cru libconftest.a conftest.o 2>&5 + echo "$RANLIB libconftest.a" >&5 + $RANLIB libconftest.a 2>&5 + cat > conftest.c << _LT_EOF +int main() { return 0;} +_LT_EOF + echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5 + $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err + _lt_result=$? + if test -s conftest.err && $GREP force_load conftest.err; then + cat conftest.err >&5 + elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then + lt_cv_ld_force_load=yes + else + cat conftest.err >&5 + fi + rm -f conftest.err libconftest.a conftest conftest.c + rm -rf conftest.dSYM + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5 +$as_echo "$lt_cv_ld_force_load" >&6; } + case $host_os in + rhapsody* | darwin1.[012]) + _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; + darwin1.*) + _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; + darwin*) # darwin 5.x on + # if running on 10.5 or later, the deployment target defaults + # to the OS version, if on x86, and 10.4, the deployment + # target defaults to 10.4. Don't you love it? 
+ case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in + 10.0,*86*-darwin8*|10.0,*-darwin[91]*) + _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; + 10.[012]*) + _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; + 10.*) + _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; + esac + ;; + esac + if test "$lt_cv_apple_cc_single_mod" = "yes"; then + _lt_dar_single_mod='$single_module' + fi + if test "$lt_cv_ld_exported_symbols_list" = "yes"; then + _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' + else + _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' + fi + if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then + _lt_dsymutil='~$DSYMUTIL $lib || :' + else + _lt_dsymutil= + fi + ;; + esac + +for ac_header in dlfcn.h +do : + ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default +" +if test "x$ac_cv_header_dlfcn_h" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_DLFCN_H 1 +_ACEOF + +fi + +done + + + + + +# Set options + + + + enable_dlopen=no + + + + # Check whether --enable-shared was given. +if test "${enable_shared+set}" = set; then : + enableval=$enable_shared; p=${PACKAGE-default} + case $enableval in + yes) enable_shared=yes ;; + no) enable_shared=no ;; + *) + enable_shared=no + # Look at the argument we got. We use all the common list separators. + lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," + for pkg in $enableval; do + IFS="$lt_save_ifs" + if test "X$pkg" = "X$p"; then + enable_shared=yes + fi + done + IFS="$lt_save_ifs" + ;; + esac +else + enable_shared=yes +fi + + + + + + + + + + # Check whether --enable-static was given. +if test "${enable_static+set}" = set; then : + enableval=$enable_static; p=${PACKAGE-default} + case $enableval in + yes) enable_static=yes ;; + no) enable_static=no ;; + *) + enable_static=no + # Look at the argument we got. 
We use all the common list separators. + lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," + for pkg in $enableval; do + IFS="$lt_save_ifs" + if test "X$pkg" = "X$p"; then + enable_static=yes + fi + done + IFS="$lt_save_ifs" + ;; + esac +else + enable_static=yes +fi + + + + + + + + + + +# Check whether --with-pic was given. +if test "${with_pic+set}" = set; then : + withval=$with_pic; lt_p=${PACKAGE-default} + case $withval in + yes|no) pic_mode=$withval ;; + *) + pic_mode=default + # Look at the argument we got. We use all the common list separators. + lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," + for lt_pkg in $withval; do + IFS="$lt_save_ifs" + if test "X$lt_pkg" = "X$lt_p"; then + pic_mode=yes + fi + done + IFS="$lt_save_ifs" + ;; + esac +else + pic_mode=default +fi + + +test -z "$pic_mode" && pic_mode=default + + + + + + + + # Check whether --enable-fast-install was given. +if test "${enable_fast_install+set}" = set; then : + enableval=$enable_fast_install; p=${PACKAGE-default} + case $enableval in + yes) enable_fast_install=yes ;; + no) enable_fast_install=no ;; + *) + enable_fast_install=no + # Look at the argument we got. We use all the common list separators. + lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," + for pkg in $enableval; do + IFS="$lt_save_ifs" + if test "X$pkg" = "X$p"; then + enable_fast_install=yes + fi + done + IFS="$lt_save_ifs" + ;; + esac +else + enable_fast_install=yes +fi + + + + + + + + + + + +# This can be used to rebuild libtool when needed +LIBTOOL_DEPS="$ltmain" + +# Always use our own libtool. +LIBTOOL='$(SHELL) $(top_builddir)/libtool' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +test -z "$LN_S" && LN_S="ln -s" + + + + + + + + + + + + + + +if test -n "${ZSH_VERSION+set}" ; then + setopt NO_GLOB_SUBST +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5 +$as_echo_n "checking for objdir... 
" >&6; } +if ${lt_cv_objdir+:} false; then : + $as_echo_n "(cached) " >&6 +else + rm -f .libs 2>/dev/null +mkdir .libs 2>/dev/null +if test -d .libs; then + lt_cv_objdir=.libs +else + # MS-DOS does not allow filenames that begin with a dot. + lt_cv_objdir=_libs +fi +rmdir .libs 2>/dev/null +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5 +$as_echo "$lt_cv_objdir" >&6; } +objdir=$lt_cv_objdir + + + + + +cat >>confdefs.h <<_ACEOF +#define LT_OBJDIR "$lt_cv_objdir/" +_ACEOF + + + + +case $host_os in +aix3*) + # AIX sometimes has problems with the GCC collect2 program. For some + # reason, if we set the COLLECT_NAMES environment variable, the problems + # vanish in a puff of smoke. + if test "X${COLLECT_NAMES+set}" != Xset; then + COLLECT_NAMES= + export COLLECT_NAMES + fi + ;; +esac + +# Global variables: +ofile=libtool +can_build_shared=yes + +# All known linkers require a `.a' archive for static linking (except MSVC, +# which needs '.lib'). +libext=a + +with_gnu_ld="$lt_cv_prog_gnu_ld" + +old_CC="$CC" +old_CFLAGS="$CFLAGS" + +# Set sane defaults for various variables +test -z "$CC" && CC=cc +test -z "$LTCC" && LTCC=$CC +test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS +test -z "$LD" && LD=ld +test -z "$ac_objext" && ac_objext=o + +for cc_temp in $compiler""; do + case $cc_temp in + compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; + distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; + \-*) ;; + *) break;; + esac +done +cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` + + +# Only perform the check for file, if the check method requires it +test -z "$MAGIC_CMD" && MAGIC_CMD=file +case $deplibs_check_method in +file_magic*) + if test "$file_magic_cmd" = '$MAGIC_CMD'; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5 +$as_echo_n "checking for ${ac_tool_prefix}file... 
" >&6; } +if ${lt_cv_path_MAGIC_CMD+:} false; then : + $as_echo_n "(cached) " >&6 +else + case $MAGIC_CMD in +[\\/*] | ?:[\\/]*) + lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. + ;; +*) + lt_save_MAGIC_CMD="$MAGIC_CMD" + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" + for ac_dir in $ac_dummy; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. + if test -f $ac_dir/${ac_tool_prefix}file; then + lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file" + if test -n "$file_magic_test_file"; then + case $deplibs_check_method in + "file_magic "*) + file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` + MAGIC_CMD="$lt_cv_path_MAGIC_CMD" + if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | + $EGREP "$file_magic_regex" > /dev/null; then + : + else + cat <<_LT_EOF 1>&2 + +*** Warning: the command libtool uses to detect shared libraries, +*** $file_magic_cmd, produces output that libtool cannot recognize. +*** The result is that libtool may fail to recognize shared libraries +*** as such. This will affect the creation of libtool libraries that +*** depend on shared libraries, but programs linked with such libtool +*** libraries will work regardless of this problem. Nevertheless, you +*** may want to report the problem to your system manager and/or to +*** bug-libtool@gnu.org + +_LT_EOF + fi ;; + esac + fi + break + fi + done + IFS="$lt_save_ifs" + MAGIC_CMD="$lt_save_MAGIC_CMD" + ;; +esac +fi + +MAGIC_CMD="$lt_cv_path_MAGIC_CMD" +if test -n "$MAGIC_CMD"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 +$as_echo "$MAGIC_CMD" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + + + +if test -z "$lt_cv_path_MAGIC_CMD"; then + if test -n "$ac_tool_prefix"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5 +$as_echo_n "checking for file... 
" >&6; } +if ${lt_cv_path_MAGIC_CMD+:} false; then : + $as_echo_n "(cached) " >&6 +else + case $MAGIC_CMD in +[\\/*] | ?:[\\/]*) + lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. + ;; +*) + lt_save_MAGIC_CMD="$MAGIC_CMD" + lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR + ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" + for ac_dir in $ac_dummy; do + IFS="$lt_save_ifs" + test -z "$ac_dir" && ac_dir=. + if test -f $ac_dir/file; then + lt_cv_path_MAGIC_CMD="$ac_dir/file" + if test -n "$file_magic_test_file"; then + case $deplibs_check_method in + "file_magic "*) + file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` + MAGIC_CMD="$lt_cv_path_MAGIC_CMD" + if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | + $EGREP "$file_magic_regex" > /dev/null; then + : + else + cat <<_LT_EOF 1>&2 + +*** Warning: the command libtool uses to detect shared libraries, +*** $file_magic_cmd, produces output that libtool cannot recognize. +*** The result is that libtool may fail to recognize shared libraries +*** as such. This will affect the creation of libtool libraries that +*** depend on shared libraries, but programs linked with such libtool +*** libraries will work regardless of this problem. 
Nevertheless, you +*** may want to report the problem to your system manager and/or to +*** bug-libtool@gnu.org + +_LT_EOF + fi ;; + esac + fi + break + fi + done + IFS="$lt_save_ifs" + MAGIC_CMD="$lt_save_MAGIC_CMD" + ;; +esac +fi + +MAGIC_CMD="$lt_cv_path_MAGIC_CMD" +if test -n "$MAGIC_CMD"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 +$as_echo "$MAGIC_CMD" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + else + MAGIC_CMD=: + fi +fi + + fi + ;; +esac + +# Use C for the default configuration in the libtool script + +lt_save_CC="$CC" +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + +# Source file extension for C test sources. +ac_ext=c + +# Object file extension for compiled C test sources. +objext=o +objext=$objext + +# Code to be used in simple compile tests +lt_simple_compile_test_code="int some_variable = 0;" + +# Code to be used in simple link tests +lt_simple_link_test_code='int main(){return(0);}' + + + + + + + +# If no C compiler was specified, use CC. +LTCC=${LTCC-"$CC"} + +# If no C compiler flags were specified, use CFLAGS. +LTCFLAGS=${LTCFLAGS-"$CFLAGS"} + +# Allow CC to be a program name with arguments. +compiler=$CC + +# Save the default compiler, since it gets overwritten when the other +# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. 
+compiler_DEFAULT=$CC + +# save warnings/boilerplate of simple test code +ac_outfile=conftest.$ac_objext +echo "$lt_simple_compile_test_code" >conftest.$ac_ext +eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err +_lt_compiler_boilerplate=`cat conftest.err` +$RM conftest* + +ac_outfile=conftest.$ac_objext +echo "$lt_simple_link_test_code" >conftest.$ac_ext +eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err +_lt_linker_boilerplate=`cat conftest.err` +$RM -r conftest* + + +## CAVEAT EMPTOR: +## There is no encapsulation within the following macros, do not change +## the running order or otherwise move them around unless you know exactly +## what you are doing... +if test -n "$compiler"; then + +lt_prog_compiler_no_builtin_flag= + +if test "$GCC" = yes; then + case $cc_basename in + nvcc*) + lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;; + *) + lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;; + esac + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 +$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; } +if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_rtti_exceptions=no + ac_outfile=conftest.$ac_objext + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + lt_compiler_flag="-fno-rtti -fno-exceptions" + # Insert the option either (1) after the last *FLAGS variable, or + # (2) before a word containing "conftest.", or (3) at the end. + # Note that $ac_compile itself does not contain backslashes and begins + # with a dollar sign (not a hyphen), so the echo should work correctly. + # The option is referenced via a variable to avoid confusing sed. 
+ lt_compile=`echo "$ac_compile" | $SED \ + -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ + -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ + -e 's:$: $lt_compiler_flag:'` + (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) + (eval "$lt_compile" 2>conftest.err) + ac_status=$? + cat conftest.err >&5 + echo "$as_me:$LINENO: \$? = $ac_status" >&5 + if (exit $ac_status) && test -s "$ac_outfile"; then + # The compiler can only warn and ignore the option if not recognized + # So say no if there are warnings other than the usual output. + $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp + $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 + if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then + lt_cv_prog_compiler_rtti_exceptions=yes + fi + fi + $RM conftest* + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 +$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } + +if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then + lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" +else + : +fi + +fi + + + + + + + lt_prog_compiler_wl= +lt_prog_compiler_pic= +lt_prog_compiler_static= + + + if test "$GCC" = yes; then + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_static='-static' + + case $host_os in + aix*) + # All AIX code is PIC. + if test "$host_cpu" = ia64; then + # AIX 5 now supports IA64 processor + lt_prog_compiler_static='-Bstatic' + fi + ;; + + amigaos*) + case $host_cpu in + powerpc) + # see comment about AmigaOS4 .so support + lt_prog_compiler_pic='-fPIC' + ;; + m68k) + # FIXME: we need at least 68020 code to build shared libraries, but + # adding the `-m68020' flag to GCC prevents building anything better, + # like `-m68040'. + lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' + ;; + esac + ;; + + beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) + # PIC is the default for these OSes. 
+ ;; + + mingw* | cygwin* | pw32* | os2* | cegcc*) + # This hack is so that the source file can tell whether it is being + # built for inclusion in a dll (and should export symbols for example). + # Although the cygwin gcc ignores -fPIC, still need this for old-style + # (--disable-auto-import) libraries + lt_prog_compiler_pic='-DDLL_EXPORT' + ;; + + darwin* | rhapsody*) + # PIC is the default on this platform + # Common symbols not allowed in MH_DYLIB files + lt_prog_compiler_pic='-fno-common' + ;; + + haiku*) + # PIC is the default for Haiku. + # The "-static" flag exists, but is broken. + lt_prog_compiler_static= + ;; + + hpux*) + # PIC is the default for 64-bit PA HP-UX, but not for 32-bit + # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag + # sets the default TLS model and affects inlining. + case $host_cpu in + hppa*64*) + # +Z the default + ;; + *) + lt_prog_compiler_pic='-fPIC' + ;; + esac + ;; + + interix[3-9]*) + # Interix 3.x gcc -fpic/-fPIC options generate broken code. + # Instead, we relocate shared libraries at runtime. + ;; + + msdosdjgpp*) + # Just because we use GCC doesn't mean we suddenly get shared libraries + # on systems that don't support them. + lt_prog_compiler_can_build_shared=no + enable_shared=no + ;; + + *nto* | *qnx*) + # QNX uses GNU C++, but need to define -shared option too, otherwise + # it will coredump. + lt_prog_compiler_pic='-fPIC -shared' + ;; + + sysv4*MP*) + if test -d /usr/nec; then + lt_prog_compiler_pic=-Kconform_pic + fi + ;; + + *) + lt_prog_compiler_pic='-fPIC' + ;; + esac + + case $cc_basename in + nvcc*) # Cuda Compiler Driver 2.2 + lt_prog_compiler_wl='-Xlinker ' + if test -n "$lt_prog_compiler_pic"; then + lt_prog_compiler_pic="-Xcompiler $lt_prog_compiler_pic" + fi + ;; + esac + else + # PORTME Check for flag to pass linker flags through the system compiler. 
+ case $host_os in + aix*) + lt_prog_compiler_wl='-Wl,' + if test "$host_cpu" = ia64; then + # AIX 5 now supports IA64 processor + lt_prog_compiler_static='-Bstatic' + else + lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp' + fi + ;; + + mingw* | cygwin* | pw32* | os2* | cegcc*) + # This hack is so that the source file can tell whether it is being + # built for inclusion in a dll (and should export symbols for example). + lt_prog_compiler_pic='-DDLL_EXPORT' + ;; + + hpux9* | hpux10* | hpux11*) + lt_prog_compiler_wl='-Wl,' + # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but + # not for PA HP-UX. + case $host_cpu in + hppa*64*|ia64*) + # +Z the default + ;; + *) + lt_prog_compiler_pic='+Z' + ;; + esac + # Is there a better lt_prog_compiler_static that works with the bundled CC? + lt_prog_compiler_static='${wl}-a ${wl}archive' + ;; + + irix5* | irix6* | nonstopux*) + lt_prog_compiler_wl='-Wl,' + # PIC (with -KPIC) is the default. + lt_prog_compiler_static='-non_shared' + ;; + + linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) + case $cc_basename in + # old Intel for x86_64 which still supported -KPIC. + ecc*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-static' + ;; + # icc used to be incompatible with GCC. + # ICC 10 doesn't accept -KPIC any more. + icc* | ifort*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-fPIC' + lt_prog_compiler_static='-static' + ;; + # Lahey Fortran 8.1. 
+ lf95*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='--shared' + lt_prog_compiler_static='--static' + ;; + nagfor*) + # NAG Fortran compiler + lt_prog_compiler_wl='-Wl,-Wl,,' + lt_prog_compiler_pic='-PIC' + lt_prog_compiler_static='-Bstatic' + ;; + pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-fpic' + lt_prog_compiler_static='-Bstatic' + ;; + ccc*) + lt_prog_compiler_wl='-Wl,' + # All Alpha code is PIC. + lt_prog_compiler_static='-non_shared' + ;; + xl* | bgxl* | bgf* | mpixl*) + # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-qpic' + lt_prog_compiler_static='-qstaticlink' + ;; + *) + case `$CC -V 2>&1 | sed 5q` in + *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*) + # Sun Fortran 8.3 passes all unrecognized flags to the linker + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + lt_prog_compiler_wl='' + ;; + *Sun\ F* | *Sun*Fortran*) + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + lt_prog_compiler_wl='-Qoption ld ' + ;; + *Sun\ C*) + # Sun C 5.9 + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + lt_prog_compiler_wl='-Wl,' + ;; + *Intel*\ [CF]*Compiler*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-fPIC' + lt_prog_compiler_static='-static' + ;; + *Portland\ Group*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-fpic' + lt_prog_compiler_static='-Bstatic' + ;; + esac + ;; + esac + ;; + + newsos6) + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + ;; + + *nto* | *qnx*) + # QNX uses GNU C++, but need to define -shared option too, otherwise + # it will coredump. + lt_prog_compiler_pic='-fPIC -shared' + ;; + + osf3* | osf4* | osf5*) + lt_prog_compiler_wl='-Wl,' + # All OSF/1 code is PIC. 
+ lt_prog_compiler_static='-non_shared' + ;; + + rdos*) + lt_prog_compiler_static='-non_shared' + ;; + + solaris*) + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + case $cc_basename in + f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) + lt_prog_compiler_wl='-Qoption ld ';; + *) + lt_prog_compiler_wl='-Wl,';; + esac + ;; + + sunos4*) + lt_prog_compiler_wl='-Qoption ld ' + lt_prog_compiler_pic='-PIC' + lt_prog_compiler_static='-Bstatic' + ;; + + sysv4 | sysv4.2uw2* | sysv4.3*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + ;; + + sysv4*MP*) + if test -d /usr/nec ;then + lt_prog_compiler_pic='-Kconform_pic' + lt_prog_compiler_static='-Bstatic' + fi + ;; + + sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + ;; + + unicos*) + lt_prog_compiler_wl='-Wl,' + lt_prog_compiler_can_build_shared=no + ;; + + uts4*) + lt_prog_compiler_pic='-pic' + lt_prog_compiler_static='-Bstatic' + ;; + + *) + lt_prog_compiler_can_build_shared=no + ;; + esac + fi + +case $host_os in + # For platforms which do not support PIC, -DPIC is meaningless: + *djgpp*) + lt_prog_compiler_pic= + ;; + *) + lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" + ;; +esac + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +$as_echo_n "checking for $compiler option to produce PIC... " >&6; } +if ${lt_cv_prog_compiler_pic+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_pic=$lt_prog_compiler_pic +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 +$as_echo "$lt_cv_prog_compiler_pic" >&6; } +lt_prog_compiler_pic=$lt_cv_prog_compiler_pic + +# +# Check to make sure the PIC flag actually works. 
+# +if test -n "$lt_prog_compiler_pic"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 +$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... " >&6; } +if ${lt_cv_prog_compiler_pic_works+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_pic_works=no + ac_outfile=conftest.$ac_objext + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + lt_compiler_flag="$lt_prog_compiler_pic -DPIC" + # Insert the option either (1) after the last *FLAGS variable, or + # (2) before a word containing "conftest.", or (3) at the end. + # Note that $ac_compile itself does not contain backslashes and begins + # with a dollar sign (not a hyphen), so the echo should work correctly. + # The option is referenced via a variable to avoid confusing sed. + lt_compile=`echo "$ac_compile" | $SED \ + -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ + -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ + -e 's:$: $lt_compiler_flag:'` + (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) + (eval "$lt_compile" 2>conftest.err) + ac_status=$? + cat conftest.err >&5 + echo "$as_me:$LINENO: \$? = $ac_status" >&5 + if (exit $ac_status) && test -s "$ac_outfile"; then + # The compiler can only warn and ignore the option if not recognized + # So say no if there are warnings other than the usual output. + $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp + $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 + if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then + lt_cv_prog_compiler_pic_works=yes + fi + fi + $RM conftest* + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 +$as_echo "$lt_cv_prog_compiler_pic_works" >&6; } + +if test x"$lt_cv_prog_compiler_pic_works" = xyes; then + case $lt_prog_compiler_pic in + "" | " "*) ;; + *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; + esac +else + lt_prog_compiler_pic= + lt_prog_compiler_can_build_shared=no +fi + +fi + + + + + + + + + + + +# +# Check to make sure the static flag actually works. +# +wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\" +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 +$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } +if ${lt_cv_prog_compiler_static_works+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_static_works=no + save_LDFLAGS="$LDFLAGS" + LDFLAGS="$LDFLAGS $lt_tmp_static_flag" + echo "$lt_simple_link_test_code" > conftest.$ac_ext + if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then + # The linker can only warn and ignore the option if not recognized + # So say no if there are warnings + if test -s conftest.err; then + # Append any errors to the config.log. 
+ cat conftest.err 1>&5 + $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp + $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 + if diff conftest.exp conftest.er2 >/dev/null; then + lt_cv_prog_compiler_static_works=yes + fi + else + lt_cv_prog_compiler_static_works=yes + fi + fi + $RM -r conftest* + LDFLAGS="$save_LDFLAGS" + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 +$as_echo "$lt_cv_prog_compiler_static_works" >&6; } + +if test x"$lt_cv_prog_compiler_static_works" = xyes; then + : +else + lt_prog_compiler_static= +fi + + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 +$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } +if ${lt_cv_prog_compiler_c_o+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_c_o=no + $RM -r conftest 2>/dev/null + mkdir conftest + cd conftest + mkdir out + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + + lt_compiler_flag="-o out/conftest2.$ac_objext" + # Insert the option either (1) after the last *FLAGS variable, or + # (2) before a word containing "conftest.", or (3) at the end. + # Note that $ac_compile itself does not contain backslashes and begins + # with a dollar sign (not a hyphen), so the echo should work correctly. + lt_compile=`echo "$ac_compile" | $SED \ + -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ + -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ + -e 's:$: $lt_compiler_flag:'` + (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) + (eval "$lt_compile" 2>out/conftest.err) + ac_status=$? + cat out/conftest.err >&5 + echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 + if (exit $ac_status) && test -s out/conftest2.$ac_objext + then + # The compiler can only warn and ignore the option if not recognized + # So say no if there are warnings + $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp + $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 + if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then + lt_cv_prog_compiler_c_o=yes + fi + fi + chmod u+w . 2>&5 + $RM conftest* + # SGI C++ compiler will create directory out/ii_files/ for + # template instantiation + test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files + $RM out/* && rmdir out + cd .. + $RM -r conftest + $RM conftest* + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 +$as_echo "$lt_cv_prog_compiler_c_o" >&6; } + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 +$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } +if ${lt_cv_prog_compiler_c_o+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_c_o=no + $RM -r conftest 2>/dev/null + mkdir conftest + cd conftest + mkdir out + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + + lt_compiler_flag="-o out/conftest2.$ac_objext" + # Insert the option either (1) after the last *FLAGS variable, or + # (2) before a word containing "conftest.", or (3) at the end. + # Note that $ac_compile itself does not contain backslashes and begins + # with a dollar sign (not a hyphen), so the echo should work correctly. + lt_compile=`echo "$ac_compile" | $SED \ + -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ + -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ + -e 's:$: $lt_compiler_flag:'` + (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) + (eval "$lt_compile" 2>out/conftest.err) + ac_status=$? + cat out/conftest.err >&5 + echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 + if (exit $ac_status) && test -s out/conftest2.$ac_objext + then + # The compiler can only warn and ignore the option if not recognized + # So say no if there are warnings + $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp + $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 + if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then + lt_cv_prog_compiler_c_o=yes + fi + fi + chmod u+w . 2>&5 + $RM conftest* + # SGI C++ compiler will create directory out/ii_files/ for + # template instantiation + test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files + $RM out/* && rmdir out + cd .. + $RM -r conftest + $RM conftest* + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 +$as_echo "$lt_cv_prog_compiler_c_o" >&6; } + + + + +hard_links="nottested" +if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then + # do not overwrite the value of need_locks provided by the user + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 +$as_echo_n "checking if we can lock with hard links... " >&6; } + hard_links=yes + $RM conftest* + ln conftest.a conftest.b 2>/dev/null && hard_links=no + touch conftest.a + ln conftest.a conftest.b 2>&5 || hard_links=no + ln conftest.a conftest.b 2>/dev/null && hard_links=no + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 +$as_echo "$hard_links" >&6; } + if test "$hard_links" = no; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 +$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} + need_locks=warn + fi +else + need_locks=no +fi + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 +$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... 
" >&6; } + + runpath_var= + allow_undefined_flag= + always_export_symbols=no + archive_cmds= + archive_expsym_cmds= + compiler_needs_object=no + enable_shared_with_static_runtimes=no + export_dynamic_flag_spec= + export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' + hardcode_automatic=no + hardcode_direct=no + hardcode_direct_absolute=no + hardcode_libdir_flag_spec= + hardcode_libdir_separator= + hardcode_minus_L=no + hardcode_shlibpath_var=unsupported + inherit_rpath=no + link_all_deplibs=unknown + module_cmds= + module_expsym_cmds= + old_archive_from_new_cmds= + old_archive_from_expsyms_cmds= + thread_safe_flag_spec= + whole_archive_flag_spec= + # include_expsyms should be a list of space-separated symbols to be *always* + # included in the symbol list + include_expsyms= + # exclude_expsyms can be an extended regexp of symbols to exclude + # it will be wrapped by ` (' and `)$', so one must not match beginning or + # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', + # as well as any symbol that contains `d'. + exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' + # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out + # platforms (ab)use it in PIC code, but their linkers get confused if + # the symbol is explicitly referenced. Since portable code cannot + # rely on this symbol name, it's probably fine to never include it in + # preloaded symbol tables. + # Exclude shared library initialization/finalization symbols. + extract_expsyms_cmds= + + case $host_os in + cygwin* | mingw* | pw32* | cegcc*) + # FIXME: the MSVC++ port hasn't been tested in a loooong time + # When not using gcc, we currently assume that we are using + # Microsoft Visual C++. 
+ if test "$GCC" != yes; then + with_gnu_ld=no + fi + ;; + interix*) + # we just hope/assume this is gcc and not c89 (= MSVC++) + with_gnu_ld=yes + ;; + openbsd*) + with_gnu_ld=no + ;; + linux* | k*bsd*-gnu | gnu*) + link_all_deplibs=no + ;; + esac + + ld_shlibs=yes + + # On some targets, GNU ld is compatible enough with the native linker + # that we're better off using the native interface for both. + lt_use_gnu_ld_interface=no + if test "$with_gnu_ld" = yes; then + case $host_os in + aix*) + # The AIX port of GNU ld has always aspired to compatibility + # with the native linker. However, as the warning in the GNU ld + # block says, versions before 2.19.5* couldn't really create working + # shared libraries, regardless of the interface used. + case `$LD -v 2>&1` in + *\ \(GNU\ Binutils\)\ 2.19.5*) ;; + *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;; + *\ \(GNU\ Binutils\)\ [3-9]*) ;; + *) + lt_use_gnu_ld_interface=yes + ;; + esac + ;; + *) + lt_use_gnu_ld_interface=yes + ;; + esac + fi + + if test "$lt_use_gnu_ld_interface" = yes; then + # If archive_cmds runs LD, not CC, wlarc should be empty + wlarc='${wl}' + + # Set some defaults for GNU ld with shared library support. These + # are reset later if shared libraries are not supported. Putting them + # here allows them to be overridden if necessary. + runpath_var=LD_RUN_PATH + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + export_dynamic_flag_spec='${wl}--export-dynamic' + # ancient GNU ld didn't support --whole-archive et. al. + if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then + whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' + else + whole_archive_flag_spec= + fi + supports_anon_versioning=no + case `$LD -v 2>&1` in + *GNU\ gold*) supports_anon_versioning=yes ;; + *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 + *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... 
+ *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... + *\ 2.11.*) ;; # other 2.11 versions + *) supports_anon_versioning=yes ;; + esac + + # See if GNU ld supports shared libraries. + case $host_os in + aix[3-9]*) + # On AIX/PPC, the GNU linker is very broken + if test "$host_cpu" != ia64; then + ld_shlibs=no + cat <<_LT_EOF 1>&2 + +*** Warning: the GNU linker, at least up to release 2.19, is reported +*** to be unable to reliably create shared libraries on AIX. +*** Therefore, libtool is disabling shared libraries support. If you +*** really care for shared libraries, you may want to install binutils +*** 2.20 or above, or modify your PATH so that a non-GNU linker is found. +*** You will then need to restart the configuration process. + +_LT_EOF + fi + ;; + + amigaos*) + case $host_cpu in + powerpc) + # see comment about AmigaOS4 .so support + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='' + ;; + m68k) + archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' + hardcode_libdir_flag_spec='-L$libdir' + hardcode_minus_L=yes + ;; + esac + ;; + + beos*) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + allow_undefined_flag=unsupported + # Joseph Beckenbach says some releases of gcc + # support --undefined. This deserves some investigation. FIXME + archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + else + ld_shlibs=no + fi + ;; + + cygwin* | mingw* | pw32* | cegcc*) + # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, + # as there is no search path for DLLs. 
+ hardcode_libdir_flag_spec='-L$libdir' + export_dynamic_flag_spec='${wl}--export-all-symbols' + allow_undefined_flag=unsupported + always_export_symbols=no + enable_shared_with_static_runtimes=yes + export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' + exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + # If the export-symbols file already is a .def file (1st line + # is EXPORTS), use it as is; otherwise, prepend... + archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + cp $export_symbols $output_objdir/$soname.def; + else + echo EXPORTS > $output_objdir/$soname.def; + cat $export_symbols >> $output_objdir/$soname.def; + fi~ + $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + else + ld_shlibs=no + fi + ;; + + haiku*) + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + link_all_deplibs=yes + ;; + + interix[3-9]*) + hardcode_direct=no + hardcode_shlibpath_var=no + hardcode_libdir_flag_spec='${wl}-rpath,$libdir' + export_dynamic_flag_spec='${wl}-E' + # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. + # Instead, shared libraries are loaded at an image base (0x10000000 by + # default) and relocated if they conflict, which is a slow very memory + # consuming and fragmenting process. To avoid this, we pick a random, + # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link + # time. 
Moving up from 0x10000000 also allows more sbrk(2) space. + archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' + archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' + ;; + + gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) + tmp_diet=no + if test "$host_os" = linux-dietlibc; then + case $cc_basename in + diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) + esac + fi + if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ + && test "$tmp_diet" = no + then + tmp_addflag=' $pic_flag' + tmp_sharedflag='-shared' + case $cc_basename,$host_cpu in + pgcc*) # Portland Group C compiler + whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' + tmp_addflag=' $pic_flag' + ;; + pgf77* | pgf90* | pgf95* | pgfortran*) + # Portland Group f77 and f90 compilers + whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' + tmp_addflag=' $pic_flag -Mnomain' ;; + ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 + tmp_addflag=' -i_dynamic' ;; + efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 + tmp_addflag=' -i_dynamic -nofor_main' ;; + ifc* | ifort*) # Intel Fortran compiler + tmp_addflag=' -nofor_main' ;; + lf95*) # Lahey Fortran 8.1 + whole_archive_flag_spec= + tmp_sharedflag='--shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + 
tmp_sharedflag='-qmkshrobj' + tmp_addflag= ;; + nvcc*) # Cuda Compiler Driver 2.2 + whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' + compiler_needs_object=yes + ;; + esac + case `$CC -V 2>&1 | sed 5q` in + *Sun\ C*) # Sun C 5.9 + whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' + compiler_needs_object=yes + tmp_sharedflag='-G' ;; + *Sun\ F*) # Sun Fortran 8.3 + tmp_sharedflag='-G' ;; + esac + archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ + $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' + fi + + case $cc_basename in + xlf* | bgf* | bgxlf* | mpixlf*) + # IBM XL Fortran 10.1 on PPC cannot create shared libs itself + whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ + $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + 
;; + esac + else + ld_shlibs=no + fi + ;; + + netbsd* | netbsdelf*-gnu) + if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then + archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else + archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + ;; + + solaris*) + if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then + ld_shlibs=no + cat <<_LT_EOF 1>&2 + +*** Warning: The releases 2.8.* of the GNU linker cannot reliably +*** create shared libraries on Solaris systems. Therefore, libtool +*** is disabling shared libraries support. We urge you to upgrade GNU +*** binutils to release 2.9.1 or newer. Another option is to modify +*** your PATH or compiler configuration so that the native linker is +*** used, and then restart. + +_LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi + ;; + + sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) + case `$LD -v 2>&1` in + *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) + ld_shlibs=no + cat <<_LT_EOF 1>&2 + +*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not +*** reliably create shared libraries on SCO systems. Therefore, libtool +*** is disabling shared libraries support. We urge you to upgrade GNU +*** binutils to release 2.16.91.0.3 or newer. Another option is to modify +*** your PATH or compiler configuration so that the native linker is +*** used, and then restart. 
+ +_LT_EOF + ;; + *) + # For security reasons, it is highly recommended that you always + # use absolute paths for naming shared libraries, and exclude the + # DT_RUNPATH tag from executables and libraries. But doing so + # requires that you compile everything twice, which is a pain. + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi + ;; + esac + ;; + + sunos4*) + archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' + wlarc= + hardcode_direct=yes + hardcode_shlibpath_var=no + ;; + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then + archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi + ;; + esac + + if test "$ld_shlibs" = no; then + runpath_var= + hardcode_libdir_flag_spec= + export_dynamic_flag_spec= + whole_archive_flag_spec= + fi + else + # PORTME fill in a description of your system's linker (not GNU ld) + case $host_os in + aix3*) + allow_undefined_flag=unsupported + always_export_symbols=yes + archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' + # Note: this linker hardcodes the directories in LIBPATH if there + # are no directories specified by -L. 
+ hardcode_minus_L=yes + if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then + # Neither direct hardcoding nor static linking is supported with a + # broken collect2. + hardcode_direct=unsupported + fi + ;; + + aix[4-9]*) + if test "$host_cpu" = ia64; then + # On IA64, the linker does run time linking by default, so we don't + # have to do anything special. + aix_use_runtimelinking=no + exp_sym_flag='-Bexport' + no_entry_flag="" + else + # If we're using GNU nm, then we don't want the "-C" option. + # -C means demangle to AIX nm, but means don't demangle with GNU nm + # Also, AIX nm treats weak defined symbols like other global + # defined symbols, whereas GNU nm marks them as "W". + if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then + export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' + else + export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' + fi + aix_use_runtimelinking=no + + # Test if we are trying to use run time linking or normal + # AIX style linking. If -brtl is somewhere in LDFLAGS, we + # need to do runtime linking. + case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) + for ld_flag in $LDFLAGS; do + if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then + aix_use_runtimelinking=yes + break + fi + done + ;; + esac + + exp_sym_flag='-bexport' + no_entry_flag='-bnoentry' + fi + + # When large executables or shared objects are built, AIX ld can + # have problems creating the table of contents. If linking a library + # or program results in "error TOC overflow" add -mminimal-toc to + # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not + # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
+ + archive_cmds='' + hardcode_direct=yes + hardcode_direct_absolute=yes + hardcode_libdir_separator=':' + link_all_deplibs=yes + file_list_spec='${wl}-f,' + + if test "$GCC" = yes; then + case $host_os in aix4.[012]|aix4.[012].*) + # We only want to do this on AIX 4.2 and lower, the check + # below for broken collect2 doesn't work under 4.3+ + collect2name=`${CC} -print-prog-name=collect2` + if test -f "$collect2name" && + strings "$collect2name" | $GREP resolve_lib_name >/dev/null + then + # We have reworked collect2 + : + else + # We have old collect2 + hardcode_direct=unsupported + # It fails to find uninstalled libraries when the uninstalled + # path is not listed in the libpath. Setting hardcode_minus_L + # to unsupported forces relinking + hardcode_minus_L=yes + hardcode_libdir_flag_spec='-L$libdir' + hardcode_libdir_separator= + fi + ;; + esac + shared_flag='-shared' + if test "$aix_use_runtimelinking" = yes; then + shared_flag="$shared_flag "'${wl}-G' + fi + link_all_deplibs=no + else + # not using gcc + if test "$host_cpu" = ia64; then + # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release + # chokes on -Wl,-G. The following line is correct: + shared_flag='-G' + else + if test "$aix_use_runtimelinking" = yes; then + shared_flag='${wl}-G' + else + shared_flag='${wl}-bM:SRE' + fi + fi + fi + + export_dynamic_flag_spec='${wl}-bexpall' + # It seems that -bexpall does not export symbols beginning with + # underscore (_), so it is better to generate a list of symbols to export. + always_export_symbols=yes + if test "$aix_use_runtimelinking" = yes; then + # Warning - without using the other runtime loading flags (-brtl), + # -berok will link without error, but may produce a broken library. + allow_undefined_flag='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. 
+ if test "${lt_cv_aix_libpath+set}" = set; then + aix_libpath=$lt_cv_aix_libpath +else + if ${lt_cv_aix_libpath_+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + + lt_aix_libpath_sed=' + /Import File Strings/,/^$/ { + /^0/ { + s/^0 *\([^ ]*\) *$/\1/ + p + } + }' + lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + # Check for a 64-bit object if we didn't find anything. + if test -z "$lt_cv_aix_libpath_"; then + lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + fi +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + if test -z "$lt_cv_aix_libpath_"; then + lt_cv_aix_libpath_="/usr/lib:/lib" + fi + +fi + + aix_libpath=$lt_cv_aix_libpath_ +fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" + else + if test "$host_cpu" = ia64; then + hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' + allow_undefined_flag="-z nodefs" + archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" + else + # Determine the default libpath from the value encoded in an + # empty executable. + if test "${lt_cv_aix_libpath+set}" = set; then + aix_libpath=$lt_cv_aix_libpath +else + if ${lt_cv_aix_libpath_+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + + lt_aix_libpath_sed=' + /Import File Strings/,/^$/ { + /^0/ { + s/^0 *\([^ ]*\) *$/\1/ + p + } + }' + lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + # Check for a 64-bit object if we didn't find anything. + if test -z "$lt_cv_aix_libpath_"; then + lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + fi +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + if test -z "$lt_cv_aix_libpath_"; then + lt_cv_aix_libpath_="/usr/lib:/lib" + fi + +fi + + aix_libpath=$lt_cv_aix_libpath_ +fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, + # -berok will link without error, but may produce a broken library. + no_undefined_flag=' ${wl}-bernotok' + allow_undefined_flag=' ${wl}-berok' + if test "$with_gnu_ld" = yes; then + # We only use this code for GNU lds that support --whole-archive. + whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive' + else + # Exported symbols can be pulled into shared objects from archives + whole_archive_flag_spec='$convenience' + fi + archive_cmds_need_lc=yes + # This is similar to how AIX traditionally builds its shared libraries. 
+ archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' + fi + fi + ;; + + amigaos*) + case $host_cpu in + powerpc) + # see comment about AmigaOS4 .so support + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='' + ;; + m68k) + archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' + hardcode_libdir_flag_spec='-L$libdir' + hardcode_minus_L=yes + ;; + esac + ;; + + bsdi[45]*) + export_dynamic_flag_spec=-rdynamic + ;; + + cygwin* | mingw* | pw32* | cegcc*) + # When not using gcc, we currently assume that we are using + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. + case $cc_basename in + cl*) + # Native MSVC + hardcode_libdir_flag_spec=' ' + allow_undefined_flag=unsupported + always_export_symbols=yes + file_list_spec='@' + # Tell ltmain to make .lib files, not .a files. + libext=lib + # Tell ltmain to make .dll files, not .so files. + shrext_cmds=".dll" + # FIXME: Setting linknames here is a bad hack. 
+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' + archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; + else + sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; + fi~ + $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ + linknames=' + # The linker will not automatically build a static lib if we build a DLL. + # _LT_TAGVAR(old_archive_from_new_cmds, )='true' + enable_shared_with_static_runtimes=yes + exclude_expsyms='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' + export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' + # Don't use ranlib + old_postinstall_cmds='chmod 644 $oldlib' + postlink_cmds='lt_outputfile="@OUTPUT@"~ + lt_tool_outputfile="@TOOL_OUTPUT@"~ + case $lt_outputfile in + *.exe|*.EXE) ;; + *) + lt_outputfile="$lt_outputfile.exe" + lt_tool_outputfile="$lt_tool_outputfile.exe" + ;; + esac~ + if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then + $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; + $RM "$lt_outputfile.manifest"; + fi' + ;; + *) + # Assume MSVC wrapper + hardcode_libdir_flag_spec=' ' + allow_undefined_flag=unsupported + # Tell ltmain to make .lib files, not .a files. + libext=lib + # Tell ltmain to make .dll files, not .so files. + shrext_cmds=".dll" + # FIXME: Setting linknames here is a bad hack. 
+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' + # The linker will automatically build a .lib file if we build a DLL. + old_archive_from_new_cmds='true' + # FIXME: Should let the user specify the lib program. + old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' + enable_shared_with_static_runtimes=yes + ;; + esac + ;; + + darwin* | rhapsody*) + + + archive_cmds_need_lc=no + hardcode_direct=no + hardcode_automatic=yes + hardcode_shlibpath_var=unsupported + if test "$lt_cv_ld_force_load" = "yes"; then + whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' + + else + whole_archive_flag_spec='' + fi + link_all_deplibs=yes + allow_undefined_flag="$_lt_dar_allow_undefined" + case $cc_basename in + ifort*) _lt_dar_can_shared=yes ;; + *) _lt_dar_can_shared=$GCC ;; + esac + if test "$_lt_dar_can_shared" = "yes"; then + output_verbose_link_cmd=func_echo_all + archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" + module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" + archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" + module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" + + else + ld_shlibs=no + fi + + ;; + + dgux*) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs 
$linker_flags' + hardcode_libdir_flag_spec='-L$libdir' + hardcode_shlibpath_var=no + ;; + + # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor + # support. Future versions do this automatically, but an explicit c++rt0.o + # does not break anything, and helps significantly (at the cost of a little + # extra space). + freebsd2.2*) + archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no + ;; + + # Unfortunately, older versions of FreeBSD 2 do not have this feature. + freebsd2.*) + archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' + hardcode_direct=yes + hardcode_minus_L=yes + hardcode_shlibpath_var=no + ;; + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. + freebsd* | dragonfly*) + archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no + ;; + + hpux9*) + if test "$GCC" = yes; then + archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi + hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' + hardcode_libdir_separator=: + hardcode_direct=yes + + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. 
+ hardcode_minus_L=yes + export_dynamic_flag_spec='${wl}-E' + ;; + + hpux10*) + if test "$GCC" = yes && test "$with_gnu_ld" = no; then + archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi + if test "$with_gnu_ld" = no; then + hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' + hardcode_libdir_separator=: + hardcode_direct=yes + hardcode_direct_absolute=yes + export_dynamic_flag_spec='${wl}-E' + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. + hardcode_minus_L=yes + fi + ;; + + hpux11*) + if test "$GCC" = yes && test "$with_gnu_ld" = no; then + case $host_cpu in + hppa*64*) + archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) + archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) + archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else + case $host_cpu in + hppa*64*) + archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) + archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) + + # Older versions of the 11.00 compiler do not understand -b yet + # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5 +$as_echo_n "checking if $CC understands -b... 
" >&6; } +if ${lt_cv_prog_compiler__b+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler__b=no + save_LDFLAGS="$LDFLAGS" + LDFLAGS="$LDFLAGS -b" + echo "$lt_simple_link_test_code" > conftest.$ac_ext + if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then + # The linker can only warn and ignore the option if not recognized + # So say no if there are warnings + if test -s conftest.err; then + # Append any errors to the config.log. + cat conftest.err 1>&5 + $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp + $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 + if diff conftest.exp conftest.er2 >/dev/null; then + lt_cv_prog_compiler__b=yes + fi + else + lt_cv_prog_compiler__b=yes + fi + fi + $RM -r conftest* + LDFLAGS="$save_LDFLAGS" + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5 +$as_echo "$lt_cv_prog_compiler__b" >&6; } + +if test x"$lt_cv_prog_compiler__b" = xyes; then + archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' +else + archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' +fi + + ;; + esac + fi + if test "$with_gnu_ld" = no; then + hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' + hardcode_libdir_separator=: + + case $host_cpu in + hppa*64*|ia64*) + hardcode_direct=no + hardcode_shlibpath_var=no + ;; + *) + hardcode_direct=yes + hardcode_direct_absolute=yes + export_dynamic_flag_spec='${wl}-E' + + # hardcode_minus_L: Not really in the search PATH, + # but as the default location of the library. 
+ hardcode_minus_L=yes + ;; + esac + fi + ;; + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then + archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. + # This should be the same for all languages, so no per-tag cache variable. + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 +$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } +if ${lt_cv_irix_exported_symbol+:} false; then : + $as_echo_n "(cached) " >&6 +else + save_LDFLAGS="$LDFLAGS" + LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +int foo (void) { return 0; } +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + lt_cv_irix_exported_symbol=yes +else + lt_cv_irix_exported_symbol=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LDFLAGS="$save_LDFLAGS" +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 +$as_echo "$lt_cv_irix_exported_symbol" >&6; } + if test "$lt_cv_irix_exported_symbol" = yes; then + archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' + fi + else + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' + fi + archive_cmds_need_lc='no' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + hardcode_libdir_separator=: + inherit_rpath=yes + link_all_deplibs=yes + ;; + + netbsd* | netbsdelf*-gnu) + if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then + archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out + else + archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF + fi + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no + ;; + + newsos6) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_direct=yes + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + hardcode_libdir_separator=: + hardcode_shlibpath_var=no + ;; + + *nto* | *qnx*) + ;; + + openbsd*) + if test -f 
/usr/libexec/ld.so; then + hardcode_direct=yes + hardcode_shlibpath_var=no + hardcode_direct_absolute=yes + if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' + hardcode_libdir_flag_spec='${wl}-rpath,$libdir' + export_dynamic_flag_spec='${wl}-E' + else + case $host_os in + openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) + archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' + hardcode_libdir_flag_spec='-R$libdir' + ;; + *) + archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='${wl}-rpath,$libdir' + ;; + esac + fi + else + ld_shlibs=no + fi + ;; + + os2*) + hardcode_libdir_flag_spec='-L$libdir' + hardcode_minus_L=yes + allow_undefined_flag=unsupported + archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' + old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' + ;; + + osf3*) + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' + archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + else + allow_undefined_flag=' -expect_unresolved \*' + archive_cmds='$CC -shared${allow_undefined_flag} $libobjs 
$deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + fi + archive_cmds_need_lc='no' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + hardcode_libdir_separator=: + ;; + + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' + archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + else + allow_undefined_flag=' -expect_unresolved \*' + archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ + $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' + + # Both c and cxx compiler support -rpath directly + hardcode_libdir_flag_spec='-rpath $libdir' + fi + archive_cmds_need_lc='no' + hardcode_libdir_separator=: + ;; + + solaris*) + no_undefined_flag=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' + archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $CC -shared $pic_flag ${wl}-z 
${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) + wlarc='' + archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' + ;; + *) + wlarc='${wl}' + archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ + $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + ;; + esac + fi + hardcode_libdir_flag_spec='-R$libdir' + hardcode_shlibpath_var=no + case $host_os in + solaris2.[0-5] | solaris2.[0-5].*) ;; + *) + # The compiler driver will combine and reorder linker options, + # but understands `-z linker_flag'. GCC discards it without `$wl', + # but is careful enough not to reorder. + # Supported since Solaris 2.6 (maybe 2.5.1?) + if test "$GCC" = yes; then + whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' + else + whole_archive_flag_spec='-z allextract$convenience -z defaultextract' + fi + ;; + esac + link_all_deplibs=yes + ;; + + sunos4*) + if test "x$host_vendor" = xsequent; then + # Use $CC to link under sequent, because it throws in some extra .o + # files that make .init and .fini sections work. 
+ archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' + fi + hardcode_libdir_flag_spec='-L$libdir' + hardcode_direct=yes + hardcode_minus_L=yes + hardcode_shlibpath_var=no + ;; + + sysv4) + case $host_vendor in + sni) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_direct=yes # is this really true??? + ;; + siemens) + ## LD is ld it makes a PLAMLIB + ## CC just makes a GrossModule. + archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' + reload_cmds='$CC -r -o $output$reload_objs' + hardcode_direct=no + ;; + motorola) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_direct=no #Motorola manual says yes, but my tests say they lie + ;; + esac + runpath_var='LD_RUN_PATH' + hardcode_shlibpath_var=no + ;; + + sysv4.3*) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_shlibpath_var=no + export_dynamic_flag_spec='-Bexport' + ;; + + sysv4*MP*) + if test -d /usr/nec; then + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_shlibpath_var=no + runpath_var=LD_RUN_PATH + hardcode_runpath_var=yes + ld_shlibs=yes + fi + ;; + + sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) + no_undefined_flag='${wl}-z,text' + archive_cmds_need_lc=no + hardcode_shlibpath_var=no + runpath_var='LD_RUN_PATH' + + if test "$GCC" = yes; then + archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + fi + ;; + + sysv5* | sco3.2v5* | 
sco5v6*) + # Note: We can NOT use -z defs as we might desire, because we do not + # link with -lc, and that would cause any symbols used from libc to + # always be unresolved, which means just about no library would + # ever link correctly. If we're not using GNU ld we use -z text + # though, which does catch some bad symbols but isn't as heavy-handed + # as -z defs. + no_undefined_flag='${wl}-z,text' + allow_undefined_flag='${wl}-z,nodefs' + archive_cmds_need_lc=no + hardcode_shlibpath_var=no + hardcode_libdir_flag_spec='${wl}-R,$libdir' + hardcode_libdir_separator=':' + link_all_deplibs=yes + export_dynamic_flag_spec='${wl}-Bexport' + runpath_var='LD_RUN_PATH' + + if test "$GCC" = yes; then + archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' + fi + ;; + + uts4*) + archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' + hardcode_libdir_flag_spec='-L$libdir' + hardcode_shlibpath_var=no + ;; + + *) + ld_shlibs=no + ;; + esac + + if test x$host_vendor = xsni; then + case $host in + sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) + export_dynamic_flag_spec='${wl}-Blargedynsym' + ;; + esac + fi + fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 +$as_echo "$ld_shlibs" >&6; } +test "$ld_shlibs" = no && can_build_shared=no + +with_gnu_ld=$with_gnu_ld + + + + + + + + + + + + + + + +# +# Do we need to explicitly link libc? +# +case "x$archive_cmds_need_lc" in +x|xyes) + # Assume -lc should be added + archive_cmds_need_lc=yes + + if test "$enable_shared" = yes && test "$GCC" = yes; then + case $archive_cmds in + *'~'*) + # FIXME: we may have to deal with multi-command sequences. 
+ ;; + '$CC '*) + # Test whether the compiler implicitly links with -lc since on some + # systems, -lgcc has to come before -lc. If gcc already passes -lc + # to ld, don't add -lc before -lgcc. + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 +$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } +if ${lt_cv_archive_cmds_need_lc+:} false; then : + $as_echo_n "(cached) " >&6 +else + $RM conftest* + echo "$lt_simple_compile_test_code" > conftest.$ac_ext + + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } 2>conftest.err; then + soname=conftest + lib=conftest + libobjs=conftest.$ac_objext + deplibs= + wl=$lt_prog_compiler_wl + pic_flag=$lt_prog_compiler_pic + compiler_flags=-v + linker_flags=-v + verstring= + output_objdir=. + libname=conftest + lt_save_allow_undefined_flag=$allow_undefined_flag + allow_undefined_flag= + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 + (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } + then + lt_cv_archive_cmds_need_lc=no + else + lt_cv_archive_cmds_need_lc=yes + fi + allow_undefined_flag=$lt_save_allow_undefined_flag + else + cat conftest.err 1>&5 + fi + $RM conftest* + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5 +$as_echo "$lt_cv_archive_cmds_need_lc" >&6; } + archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc + ;; + esac + fi + ;; +esac + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 +$as_echo_n "checking dynamic linker characteristics... " >&6; } + +if test "$GCC" = yes; then + case $host_os in + darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; + *) lt_awk_arg="/^libraries:/" ;; + esac + case $host_os in + mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;; + *) lt_sed_strip_eq="s,=/,/,g" ;; + esac + lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` + case $lt_search_path_spec in + *\;*) + # if the path contains ";" then we assume it to be the separator + # otherwise default to the standard path separator (i.e. ":") - it is + # assumed that no part of a normal pathname contains ";" but that should + # okay in the real world where ";" in dirpaths is itself problematic. + lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'` + ;; + *) + lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"` + ;; + esac + # Ok, now we have the path, separated by spaces, we can step through it + # and add multilib dir if necessary. 
+ lt_tmp_lt_search_path_spec= + lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` + for lt_sys_path in $lt_search_path_spec; do + if test -d "$lt_sys_path/$lt_multi_os_dir"; then + lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" + else + test -d "$lt_sys_path" && \ + lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" + fi + done + lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' +BEGIN {RS=" "; FS="/|\n";} { + lt_foo=""; + lt_count=0; + for (lt_i = NF; lt_i > 0; lt_i--) { + if ($lt_i != "" && $lt_i != ".") { + if ($lt_i == "..") { + lt_count++; + } else { + if (lt_count == 0) { + lt_foo="/" $lt_i lt_foo; + } else { + lt_count--; + } + } + } + } + if (lt_foo != "") { lt_freq[lt_foo]++; } + if (lt_freq[lt_foo] == 1) { print lt_foo; } +}'` + # AWK program above erroneously prepends '/' to C:/dos/paths + # for these hosts. + case $host_os in + mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ + $SED 's,/\([A-Za-z]:\),\1,g'` ;; + esac + sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` +else + sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" +fi +library_names_spec= +libname_spec='lib$name' +soname_spec= +shrext_cmds=".so" +postinstall_cmds= +postuninstall_cmds= +finish_cmds= +finish_eval= +shlibpath_var= +shlibpath_overrides_runpath=unknown +version_type=none +dynamic_linker="$host_os ld.so" +sys_lib_dlsearch_path_spec="/lib /usr/lib" +need_lib_prefix=unknown +hardcode_into_libs=no + +# when you set need_version to no, make sure it does not cause -set_version +# flags to be left without arguments +need_version=unknown + +case $host_os in +aix3*) + version_type=linux # correct to gnu/linux during the next big refactor + library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' + shlibpath_var=LIBPATH + + # AIX 3 has no versioning support, so we append a major version to the name. 
+ soname_spec='${libname}${release}${shared_ext}$major' + ;; + +aix[4-9]*) + version_type=linux # correct to gnu/linux during the next big refactor + need_lib_prefix=no + need_version=no + hardcode_into_libs=yes + if test "$host_cpu" = ia64; then + # AIX 5 supports IA64 + library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + else + # With GCC up to 2.95.x, collect2 would create an import file + # for dependence libraries. The import file would start with + # the line `#! .'. This would cause the generated library to + # depend on `.', always an invalid library. This was fixed in + # development snapshots of GCC prior to 3.0. + case $host_os in + aix4 | aix4.[01] | aix4.[01].*) + if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' + echo ' yes ' + echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then + : + else + can_build_shared=no + fi + ;; + esac + # AIX (on Power*) has no versioning support, so currently we can not hardcode correct + # soname into executable. Probably we can add versioning support to + # collect2, so additional links can be useful in future. + if test "$aix_use_runtimelinking" = yes; then + # If using run time linking (on AIX 4.2 or later) use lib.so + # instead of lib.a to let people know that these are not + # typical AIX shared libraries. + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + else + # We preserve .a as extension for shared libraries through AIX4.2 + # and later when we are not doing run time linking. + library_names_spec='${libname}${release}.a $libname.a' + soname_spec='${libname}${release}${shared_ext}$major' + fi + shlibpath_var=LIBPATH + fi + ;; + +amigaos*) + case $host_cpu in + powerpc) + # Since July 2007 AmigaOS4 officially supports .so libraries. 
+ # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + ;; + m68k) + library_names_spec='$libname.ixlibrary $libname.a' + # Create ${libname}_ixlibrary.a entries in /sys/libs. + finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' + ;; + esac + ;; + +beos*) + library_names_spec='${libname}${shared_ext}' + dynamic_linker="$host_os ld.so" + shlibpath_var=LIBRARY_PATH + ;; + +bsdi[45]*) + version_type=linux # correct to gnu/linux during the next big refactor + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' + shlibpath_var=LD_LIBRARY_PATH + sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" + sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" + # the default ld.so.conf also contains /usr/contrib/lib and + # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow + # libtool to hard-code these into programs + ;; + +cygwin* | mingw* | pw32* | cegcc*) + version_type=windows + shrext_cmds=".dll" + need_version=no + need_lib_prefix=no + + case $GCC,$cc_basename in + yes,*) + # gcc + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ + dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ + dldir=$destdir/`dirname \$dlpath`~ + test -d \$dldir || mkdir -p \$dldir~ + $install_prog $dir/$dlname \$dldir/$dlname~ + chmod a+x \$dldir/$dlname~ + if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then + eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; + fi' + postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ + dlpath=$dir/\$dldll~ + $RM \$dlpath' + shlibpath_overrides_runpath=yes + + case $host_os in + cygwin*) + # Cygwin DLLs use 'cyg' prefix rather than 'lib' + soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + + sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api" + ;; + mingw* | cegcc*) + # MinGW DLLs use traditional 'lib' prefix + soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + pw32*) + # pw32 DLLs use 'pw' prefix rather than 'lib' + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + esac + dynamic_linker='Win32 ld.exe' + ;; + + *,cl*) + # Native MSVC + libname_spec='$name' + soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + library_names_spec='${libname}.dll.lib' + + case $build_os in + mingw*) + sys_lib_search_path_spec= + lt_save_ifs=$IFS + IFS=';' + for lt_path in $LIB + do + IFS=$lt_save_ifs + # Let DOS variable expansion print the short 8.3 style file name. + lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` + sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" + done + IFS=$lt_save_ifs + # Convert to MSYS style. + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` + ;; + cygwin*) + # Convert to unix form, then to dos form, then back to unix form + # but this time dos style (no spaces!) 
so that the unix form looks + # like /cygdrive/c/PROGRA~1:/cygdr... + sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` + sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` + sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + ;; + *) + sys_lib_search_path_spec="$LIB" + if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then + # It is most probably a Windows format PATH. + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` + else + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + fi + # FIXME: find the short name or the path components, as spaces are + # common. (e.g. "Program Files" -> "PROGRA~1") + ;; + esac + + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ + dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ + dldir=$destdir/`dirname \$dlpath`~ + test -d \$dldir || mkdir -p \$dldir~ + $install_prog $dir/$dlname \$dldir/$dlname' + postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ + dlpath=$dir/\$dldll~ + $RM \$dlpath' + shlibpath_overrides_runpath=yes + dynamic_linker='Win32 link.exe' + ;; + + *) + # Assume MSVC wrapper + library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' + dynamic_linker='Win32 ld.exe' + ;; + esac + # FIXME: first we should search . 
and the directory the executable is in + shlibpath_var=PATH + ;; + +darwin* | rhapsody*) + dynamic_linker="$host_os dyld" + version_type=darwin + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' + soname_spec='${libname}${release}${major}$shared_ext' + shlibpath_overrides_runpath=yes + shlibpath_var=DYLD_LIBRARY_PATH + shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' + + sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib" + sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' + ;; + +dgux*) + version_type=linux # correct to gnu/linux during the next big refactor + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + ;; + +freebsd* | dragonfly*) + # DragonFly does not have aout. When/if they implement a new + # versioning mechanism, adjust this. 
+ if test -x /usr/bin/objformat; then + objformat=`/usr/bin/objformat` + else + case $host_os in + freebsd[23].*) objformat=aout ;; + *) objformat=elf ;; + esac + fi + version_type=freebsd-$objformat + case $version_type in + freebsd-elf*) + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' + need_version=no + need_lib_prefix=no + ;; + freebsd-*) + library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' + need_version=yes + ;; + esac + shlibpath_var=LD_LIBRARY_PATH + case $host_os in + freebsd2.*) + shlibpath_overrides_runpath=yes + ;; + freebsd3.[01]* | freebsdelf3.[01]*) + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + ;; + freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ + freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + ;; + *) # from 4.6 on, and DragonFly + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + ;; + esac + ;; + +haiku*) + version_type=linux # correct to gnu/linux during the next big refactor + need_lib_prefix=no + need_version=no + dynamic_linker="$host_os runtime_loader" + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LIBRARY_PATH + shlibpath_overrides_runpath=yes + sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' + hardcode_into_libs=yes + ;; + +hpux9* | hpux10* | hpux11*) + # Give a soname corresponding to the major version so that dld.sl refuses to + # link against other versions. + version_type=sunos + need_lib_prefix=no + need_version=no + case $host_cpu in + ia64*) + shrext_cmds='.so' + hardcode_into_libs=yes + dynamic_linker="$host_os dld.so" + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + if test "X$HPUX_IA64_MODE" = X32; then + sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" + else + sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" + fi + sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec + ;; + hppa*64*) + shrext_cmds='.sl' + hardcode_into_libs=yes + dynamic_linker="$host_os dld.sl" + shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH + shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" + sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec + ;; + *) + shrext_cmds='.sl' + dynamic_linker="$host_os dld.sl" + shlibpath_var=SHLIB_PATH + shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + ;; + esac + # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
+ postinstall_cmds='chmod 555 $lib' + # or fails outright, so override atomically: + install_override_mode=555 + ;; + +interix[3-9]*) + version_type=linux # correct to gnu/linux during the next big refactor + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + ;; + +irix5* | irix6* | nonstopux*) + case $host_os in + nonstopux*) version_type=nonstopux ;; + *) + if test "$lt_cv_prog_gnu_ld" = yes; then + version_type=linux # correct to gnu/linux during the next big refactor + else + version_type=irix + fi ;; + esac + need_lib_prefix=no + need_version=no + soname_spec='${libname}${release}${shared_ext}$major' + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' + case $host_os in + irix5* | nonstopux*) + libsuff= shlibsuff= + ;; + *) + case $LD in # libtool.m4 will add one of these switches to LD + *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") + libsuff= shlibsuff= libmagic=32-bit;; + *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") + libsuff=32 shlibsuff=N32 libmagic=N32;; + *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") + libsuff=64 shlibsuff=64 libmagic=64-bit;; + *) libsuff= shlibsuff= libmagic=never-match;; + esac + ;; + esac + shlibpath_var=LD_LIBRARY${shlibsuff}_PATH + shlibpath_overrides_runpath=no + sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" + sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" + hardcode_into_libs=yes + ;; + +# No shared lib support for Linux oldld, aout, or coff. +linux*oldld* | linux*aout* | linux*coff*) + dynamic_linker=no + ;; + +# This must be glibc/ELF. 
+linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) + version_type=linux # correct to gnu/linux during the next big refactor + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + + # Some binutils ld are patched to set DT_RUNPATH + if ${lt_cv_shlibpath_overrides_runpath+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_shlibpath_overrides_runpath=no + save_LDFLAGS=$LDFLAGS + save_libdir=$libdir + eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \ + LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\"" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : + lt_cv_shlibpath_overrides_runpath=yes +fi +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LDFLAGS=$save_LDFLAGS + libdir=$save_libdir + +fi + + shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath + + # This implies no fast_install, which is unacceptable. + # Some rework will be needed to allow for fast_install + # before this can be enabled. 
+ hardcode_into_libs=yes + + # Append ld.so.conf contents to the search path + if test -f /etc/ld.so.conf; then + lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` + sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" + fi + + # We used to test for /lib/ld.so.1 and disable shared libraries on + # powerpc, because MkLinux only supported shared libraries with the + # GNU dynamic linker. Since this was broken with cross compilers, + # most powerpc-linux boxes support dynamic linking these days and + # people can always --disable-shared, the test was removed, and we + # assume the GNU/Linux dynamic linker is in use. + dynamic_linker='GNU/Linux ld.so' + ;; + +netbsdelf*-gnu) + version_type=linux + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + dynamic_linker='NetBSD ld.elf_so' + ;; + +netbsd*) + version_type=sunos + need_lib_prefix=no + need_version=no + if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' + finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' + dynamic_linker='NetBSD (a.out) ld.so' + else + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + dynamic_linker='NetBSD ld.elf_so' + fi + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + ;; + +newsos6) + version_type=linux # correct to gnu/linux during the 
next big refactor + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + ;; + +*nto* | *qnx*) + version_type=qnx + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + dynamic_linker='ldqnx.so' + ;; + +openbsd*) + version_type=sunos + sys_lib_dlsearch_path_spec="/usr/lib" + need_lib_prefix=no + # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. + case $host_os in + openbsd3.3 | openbsd3.3.*) need_version=yes ;; + *) need_version=no ;; + esac + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' + finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' + shlibpath_var=LD_LIBRARY_PATH + if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then + case $host_os in + openbsd2.[89] | openbsd2.[89].*) + shlibpath_overrides_runpath=no + ;; + *) + shlibpath_overrides_runpath=yes + ;; + esac + else + shlibpath_overrides_runpath=yes + fi + ;; + +os2*) + libname_spec='$name' + shrext_cmds=".dll" + need_lib_prefix=no + library_names_spec='$libname${shared_ext} $libname.a' + dynamic_linker='OS/2 ld.exe' + shlibpath_var=LIBPATH + ;; + +osf3* | osf4* | osf5*) + version_type=osf + need_lib_prefix=no + need_version=no + soname_spec='${libname}${release}${shared_ext}$major' + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" + 
sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" + ;; + +rdos*) + dynamic_linker=no + ;; + +solaris*) + version_type=linux # correct to gnu/linux during the next big refactor + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + # ldd complains unless libraries are executable + postinstall_cmds='chmod +x $lib' + ;; + +sunos4*) + version_type=sunos + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' + finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + if test "$with_gnu_ld" = yes; then + need_lib_prefix=no + fi + need_version=yes + ;; + +sysv4 | sysv4.3*) + version_type=linux # correct to gnu/linux during the next big refactor + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + case $host_vendor in + sni) + shlibpath_overrides_runpath=no + need_lib_prefix=no + runpath_var=LD_RUN_PATH + ;; + siemens) + need_lib_prefix=no + ;; + motorola) + need_lib_prefix=no + need_version=no + shlibpath_overrides_runpath=no + sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' + ;; + esac + ;; + +sysv4*MP*) + if test -d /usr/nec ;then + version_type=linux # correct to gnu/linux during the next big refactor + library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' + soname_spec='$libname${shared_ext}.$major' + shlibpath_var=LD_LIBRARY_PATH + fi + ;; + +sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) + version_type=freebsd-elf + need_lib_prefix=no + need_version=no + 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=yes + hardcode_into_libs=yes + if test "$with_gnu_ld" = yes; then + sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' + else + sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' + case $host_os in + sco3.2v5*) + sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" + ;; + esac + fi + sys_lib_dlsearch_path_spec='/usr/lib' + ;; + +tpf*) + # TPF is a cross-target only. Preferred cross-host = GNU/Linux. + version_type=linux # correct to gnu/linux during the next big refactor + need_lib_prefix=no + need_version=no + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + shlibpath_var=LD_LIBRARY_PATH + shlibpath_overrides_runpath=no + hardcode_into_libs=yes + ;; + +uts4*) + version_type=linux # correct to gnu/linux during the next big refactor + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LD_LIBRARY_PATH + ;; + +*) + dynamic_linker=no + ;; +esac +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 +$as_echo "$dynamic_linker" >&6; } +test "$dynamic_linker" = no && can_build_shared=no + +variables_saved_for_relink="PATH $shlibpath_var $runpath_var" +if test "$GCC" = yes; then + variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" +fi + +if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then + sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" +fi +if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then + sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" +fi + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 +$as_echo_n "checking how to hardcode library paths into programs... " >&6; } +hardcode_action= +if test -n "$hardcode_libdir_flag_spec" || + test -n "$runpath_var" || + test "X$hardcode_automatic" = "Xyes" ; then + + # We can hardcode non-existent directories. + if test "$hardcode_direct" != no && + # If the only mechanism to avoid hardcoding is shlibpath_var, we + # have to relink, otherwise we might link with an installed library + # when we should be linking with a yet-to-be-installed one + ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no && + test "$hardcode_minus_L" != no; then + # Linking always hardcodes the temporary library directory. + hardcode_action=relink + else + # We can link without hardcoding, and we can hardcode nonexisting dirs. + hardcode_action=immediate + fi +else + # We cannot hardcode anything, or else we can only hardcode existing + # directories. 
+ hardcode_action=unsupported +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 +$as_echo "$hardcode_action" >&6; } + +if test "$hardcode_action" = relink || + test "$inherit_rpath" = yes; then + # Fast installation is not supported + enable_fast_install=no +elif test "$shlibpath_overrides_runpath" = yes || + test "$enable_shared" = no; then + # Fast installation is not necessary + enable_fast_install=needless +fi + + + + + + + if test "x$enable_dlopen" != xyes; then + enable_dlopen=unknown + enable_dlopen_self=unknown + enable_dlopen_self_static=unknown +else + lt_cv_dlopen=no + lt_cv_dlopen_libs= + + case $host_os in + beos*) + lt_cv_dlopen="load_add_on" + lt_cv_dlopen_libs= + lt_cv_dlopen_self=yes + ;; + + mingw* | pw32* | cegcc*) + lt_cv_dlopen="LoadLibrary" + lt_cv_dlopen_libs= + ;; + + cygwin*) + lt_cv_dlopen="dlopen" + lt_cv_dlopen_libs= + ;; + + darwin*) + # if libdl is installed we need to link against it + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 +$as_echo_n "checking for dlopen in -ldl... " >&6; } +if ${ac_cv_lib_dl_dlopen+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-ldl $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char dlopen (); +int +main () +{ +return dlopen (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_dl_dlopen=yes +else + ac_cv_lib_dl_dlopen=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 +$as_echo "$ac_cv_lib_dl_dlopen" >&6; } +if test "x$ac_cv_lib_dl_dlopen" = xyes; then : + lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" +else + + lt_cv_dlopen="dyld" + lt_cv_dlopen_libs= + lt_cv_dlopen_self=yes + +fi + + ;; + + *) + ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" +if test "x$ac_cv_func_shl_load" = xyes; then : + lt_cv_dlopen="shl_load" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 +$as_echo_n "checking for shl_load in -ldld... " >&6; } +if ${ac_cv_lib_dld_shl_load+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-ldld $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char shl_load (); +int +main () +{ +return shl_load (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_dld_shl_load=yes +else + ac_cv_lib_dld_shl_load=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 +$as_echo "$ac_cv_lib_dld_shl_load" >&6; } +if test "x$ac_cv_lib_dld_shl_load" = xyes; then : + lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld" +else + ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" +if test "x$ac_cv_func_dlopen" = xyes; then : + lt_cv_dlopen="dlopen" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 +$as_echo_n "checking for dlopen in -ldl... " >&6; } +if ${ac_cv_lib_dl_dlopen+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-ldl $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char dlopen (); +int +main () +{ +return dlopen (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_dl_dlopen=yes +else + ac_cv_lib_dl_dlopen=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 +$as_echo "$ac_cv_lib_dl_dlopen" >&6; } +if test "x$ac_cv_lib_dl_dlopen" = xyes; then : + lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 +$as_echo_n "checking for dlopen in -lsvld... 
" >&6; } +if ${ac_cv_lib_svld_dlopen+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-lsvld $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char dlopen (); +int +main () +{ +return dlopen (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_svld_dlopen=yes +else + ac_cv_lib_svld_dlopen=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 +$as_echo "$ac_cv_lib_svld_dlopen" >&6; } +if test "x$ac_cv_lib_svld_dlopen" = xyes; then : + lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 +$as_echo_n "checking for dld_link in -ldld... " >&6; } +if ${ac_cv_lib_dld_dld_link+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-ldld $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
*/ +#ifdef __cplusplus +extern "C" +#endif +char dld_link (); +int +main () +{ +return dld_link (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_dld_dld_link=yes +else + ac_cv_lib_dld_dld_link=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 +$as_echo "$ac_cv_lib_dld_dld_link" >&6; } +if test "x$ac_cv_lib_dld_dld_link" = xyes; then : + lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld" +fi + + +fi + + +fi + + +fi + + +fi + + +fi + + ;; + esac + + if test "x$lt_cv_dlopen" != xno; then + enable_dlopen=yes + else + enable_dlopen=no + fi + + case $lt_cv_dlopen in + dlopen) + save_CPPFLAGS="$CPPFLAGS" + test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" + + save_LDFLAGS="$LDFLAGS" + wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" + + save_LIBS="$LIBS" + LIBS="$lt_cv_dlopen_libs $LIBS" + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 +$as_echo_n "checking whether a program can dlopen itself... " >&6; } +if ${lt_cv_dlopen_self+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test "$cross_compiling" = yes; then : + lt_cv_dlopen_self=cross +else + lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 + lt_status=$lt_dlunknown + cat > conftest.$ac_ext <<_LT_EOF +#line $LINENO "configure" +#include "confdefs.h" + +#if HAVE_DLFCN_H +#include +#endif + +#include + +#ifdef RTLD_GLOBAL +# define LT_DLGLOBAL RTLD_GLOBAL +#else +# ifdef DL_GLOBAL +# define LT_DLGLOBAL DL_GLOBAL +# else +# define LT_DLGLOBAL 0 +# endif +#endif + +/* We may have to define LT_DLLAZY_OR_NOW in the command line if we + find out it does not work in some platform. 
*/ +#ifndef LT_DLLAZY_OR_NOW +# ifdef RTLD_LAZY +# define LT_DLLAZY_OR_NOW RTLD_LAZY +# else +# ifdef DL_LAZY +# define LT_DLLAZY_OR_NOW DL_LAZY +# else +# ifdef RTLD_NOW +# define LT_DLLAZY_OR_NOW RTLD_NOW +# else +# ifdef DL_NOW +# define LT_DLLAZY_OR_NOW DL_NOW +# else +# define LT_DLLAZY_OR_NOW 0 +# endif +# endif +# endif +# endif +#endif + +/* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ +#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +int fnord () __attribute__((visibility("default"))); +#endif + +int fnord () { return 42; } +int main () +{ + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); + int status = $lt_dlunknown; + + if (self) + { + if (dlsym (self,"fnord")) status = $lt_dlno_uscore; + else + { + if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; + else puts (dlerror ()); + } + /* dlclose (self); */ + } + else + puts (dlerror ()); + + return status; +} +_LT_EOF + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 + (eval $ac_link) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then + (./conftest; exit; ) >&5 2>/dev/null + lt_status=$? + case x$lt_status in + x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; + x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; + x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;; + esac + else : + # compilation failed + lt_cv_dlopen_self=no + fi +fi +rm -fr conftest* + + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 +$as_echo "$lt_cv_dlopen_self" >&6; } + + if test "x$lt_cv_dlopen_self" = xyes; then + wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 +$as_echo_n "checking whether a statically linked program can dlopen itself... 
" >&6; } +if ${lt_cv_dlopen_self_static+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test "$cross_compiling" = yes; then : + lt_cv_dlopen_self_static=cross +else + lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 + lt_status=$lt_dlunknown + cat > conftest.$ac_ext <<_LT_EOF +#line $LINENO "configure" +#include "confdefs.h" + +#if HAVE_DLFCN_H +#include +#endif + +#include + +#ifdef RTLD_GLOBAL +# define LT_DLGLOBAL RTLD_GLOBAL +#else +# ifdef DL_GLOBAL +# define LT_DLGLOBAL DL_GLOBAL +# else +# define LT_DLGLOBAL 0 +# endif +#endif + +/* We may have to define LT_DLLAZY_OR_NOW in the command line if we + find out it does not work in some platform. */ +#ifndef LT_DLLAZY_OR_NOW +# ifdef RTLD_LAZY +# define LT_DLLAZY_OR_NOW RTLD_LAZY +# else +# ifdef DL_LAZY +# define LT_DLLAZY_OR_NOW DL_LAZY +# else +# ifdef RTLD_NOW +# define LT_DLLAZY_OR_NOW RTLD_NOW +# else +# ifdef DL_NOW +# define LT_DLLAZY_OR_NOW DL_NOW +# else +# define LT_DLLAZY_OR_NOW 0 +# endif +# endif +# endif +# endif +#endif + +/* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ +#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +int fnord () __attribute__((visibility("default"))); +#endif + +int fnord () { return 42; } +int main () +{ + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); + int status = $lt_dlunknown; + + if (self) + { + if (dlsym (self,"fnord")) status = $lt_dlno_uscore; + else + { + if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; + else puts (dlerror ()); + } + /* dlclose (self); */ + } + else + puts (dlerror ()); + + return status; +} +_LT_EOF + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 + (eval $ac_link) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then + (./conftest; exit; ) >&5 2>/dev/null + lt_status=$? 
+ case x$lt_status in + x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; + x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; + x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;; + esac + else : + # compilation failed + lt_cv_dlopen_self_static=no + fi +fi +rm -fr conftest* + + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5 +$as_echo "$lt_cv_dlopen_self_static" >&6; } + fi + + CPPFLAGS="$save_CPPFLAGS" + LDFLAGS="$save_LDFLAGS" + LIBS="$save_LIBS" + ;; + esac + + case $lt_cv_dlopen_self in + yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; + *) enable_dlopen_self=unknown ;; + esac + + case $lt_cv_dlopen_self_static in + yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; + *) enable_dlopen_self_static=unknown ;; + esac +fi + + + + + + + + + + + + + + + + + +striplib= +old_striplib= +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5 +$as_echo_n "checking whether stripping libraries is possible... " >&6; } +if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then + test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" + test -z "$striplib" && striplib="$STRIP --strip-unneeded" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else +# FIXME - insert some real tests, host_os isn't really good enough + case $host_os in + darwin*) + if test -n "$STRIP" ; then + striplib="$STRIP -x" + old_striplib="$STRIP -S" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + fi + ;; + *) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + ;; + esac +fi + + + + + + + + + + + + + # Report which library types will actually be built + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 +$as_echo_n "checking if libtool supports shared libraries... 
" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 +$as_echo "$can_build_shared" >&6; } + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 +$as_echo_n "checking whether to build shared libraries... " >&6; } + test "$can_build_shared" = "no" && enable_shared=no + + # On AIX, shared libraries and static libraries use the same namespace, and + # are all built from PIC. + case $host_os in + aix3*) + test "$enable_shared" = yes && enable_static=no + if test -n "$RANLIB"; then + archive_cmds="$archive_cmds~\$RANLIB \$lib" + postinstall_cmds='$RANLIB $lib' + fi + ;; + + aix[4-9]*) + if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then + test "$enable_shared" = yes && enable_static=no + fi + ;; + esac + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 +$as_echo "$enable_shared" >&6; } + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 +$as_echo_n "checking whether to build static libraries... " >&6; } + # Make sure either enable_shared or enable_static is yes. + test "$enable_shared" = yes || enable_static=yes + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 +$as_echo "$enable_static" >&6; } + + + + +fi +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + +CC="$lt_save_CC" + + + + + + + + + + + + + + + + ac_config_commands="$ac_config_commands libtool" + + + + +# Only expand once: + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether byte ordering is bigendian" >&5 +$as_echo_n "checking whether byte ordering is bigendian... " >&6; } +if ${ac_cv_c_bigendian+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_c_bigendian=unknown + # See if we're dealing with a universal compiler. 
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifndef __APPLE_CC__ + not a universal capable compiler + #endif + typedef int dummy; + +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + + # Check for potential -arch flags. It is not universal unless + # there are at least two -arch flags with different values. + ac_arch= + ac_prev= + for ac_word in $CC $CFLAGS $CPPFLAGS $LDFLAGS; do + if test -n "$ac_prev"; then + case $ac_word in + i?86 | x86_64 | ppc | ppc64) + if test -z "$ac_arch" || test "$ac_arch" = "$ac_word"; then + ac_arch=$ac_word + else + ac_cv_c_bigendian=universal + break + fi + ;; + esac + ac_prev= + elif test "x$ac_word" = "x-arch"; then + ac_prev=arch + fi + done +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + if test $ac_cv_c_bigendian = unknown; then + # See if sys/param.h defines the BYTE_ORDER macro. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + #include + +int +main () +{ +#if ! (defined BYTE_ORDER && defined BIG_ENDIAN \ + && defined LITTLE_ENDIAN && BYTE_ORDER && BIG_ENDIAN \ + && LITTLE_ENDIAN) + bogus endian macros + #endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + # It does; now see whether it defined to BIG_ENDIAN or not. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + #include + +int +main () +{ +#if BYTE_ORDER != BIG_ENDIAN + not big endian + #endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_c_bigendian=yes +else + ac_cv_c_bigendian=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + fi + if test $ac_cv_c_bigendian = unknown; then + # See if defines _LITTLE_ENDIAN or _BIG_ENDIAN (e.g., Solaris). + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +int +main () +{ +#if ! 
(defined _LITTLE_ENDIAN || defined _BIG_ENDIAN) + bogus endian macros + #endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + # It does; now see whether it defined to _BIG_ENDIAN or not. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +int +main () +{ +#ifndef _BIG_ENDIAN + not big endian + #endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_c_bigendian=yes +else + ac_cv_c_bigendian=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + fi + if test $ac_cv_c_bigendian = unknown; then + # Compile a test program. + if test "$cross_compiling" = yes; then : + # Try to guess by grepping values from an object file. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +short int ascii_mm[] = + { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; + short int ascii_ii[] = + { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; + int use_ascii (int i) { + return ascii_mm[i] + ascii_ii[i]; + } + short int ebcdic_ii[] = + { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; + short int ebcdic_mm[] = + { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; + int use_ebcdic (int i) { + return ebcdic_mm[i] + ebcdic_ii[i]; + } + extern int foo; + +int +main () +{ +return use_ascii (foo) == use_ebcdic (foo); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + if grep BIGenDianSyS conftest.$ac_objext >/dev/null; then + ac_cv_c_bigendian=yes + fi + if grep LiTTleEnDian conftest.$ac_objext >/dev/null ; then + if test "$ac_cv_c_bigendian" = unknown; then + ac_cv_c_bigendian=no + else + # finding both strings is unlikely to happen, but who knows? + ac_cv_c_bigendian=unknown + fi + fi +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$ac_includes_default +int +main () +{ + + /* Are we little or big endian? From Harbison&Steele. */ + union + { + long int l; + char c[sizeof (long int)]; + } u; + u.l = 1; + return u.c[sizeof (long int) - 1] == 1; + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_run "$LINENO"; then : + ac_cv_c_bigendian=no +else + ac_cv_c_bigendian=yes +fi +rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ + conftest.$ac_objext conftest.beam conftest.$ac_ext +fi + + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_bigendian" >&5 +$as_echo "$ac_cv_c_bigendian" >&6; } + case $ac_cv_c_bigendian in #( + yes) + $as_echo "#define WORDS_BIGENDIAN 1" >>confdefs.h +;; #( + no) + ;; #( + universal) + +$as_echo "#define AC_APPLE_UNIVERSAL_BUILD 1" >>confdefs.h + + ;; #( + *) + as_fn_error $? "unknown endianness + presetting ac_cv_c_bigendian=no (or yes) will help" "$LINENO" 5 ;; + esac + + +# Check whether --enable-largefile was given. +if test "${enable_largefile+set}" = set; then : + enableval=$enable_largefile; +fi + +if test "$enable_largefile" != no; then + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for special C compiler options needed for large files" >&5 +$as_echo_n "checking for special C compiler options needed for large files... " >&6; } +if ${ac_cv_sys_largefile_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_sys_largefile_CC=no + if test "$GCC" != yes; then + ac_save_CC=$CC + while :; do + # IRIX 6.2 and later do not support large files by default, + # so use the C compiler's -n32 option if that helps. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + /* Check that off_t can represent 2**63 - 1 correctly. + We can't simply define LARGE_OFF_T to be 9223372036854775807, + since some C++ compilers masquerading as C compilers + incorrectly reject 9223372036854775807. 
*/ +#define LARGE_OFF_T ((((off_t) 1 << 31) << 31) - 1 + (((off_t) 1 << 31) << 31)) + int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 + && LARGE_OFF_T % 2147483647 == 1) + ? 1 : -1]; +int +main () +{ + + ; + return 0; +} +_ACEOF + if ac_fn_c_try_compile "$LINENO"; then : + break +fi +rm -f core conftest.err conftest.$ac_objext + CC="$CC -n32" + if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_sys_largefile_CC=' -n32'; break +fi +rm -f core conftest.err conftest.$ac_objext + break + done + CC=$ac_save_CC + rm -f conftest.$ac_ext + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_largefile_CC" >&5 +$as_echo "$ac_cv_sys_largefile_CC" >&6; } + if test "$ac_cv_sys_largefile_CC" != no; then + CC=$CC$ac_cv_sys_largefile_CC + fi + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _FILE_OFFSET_BITS value needed for large files" >&5 +$as_echo_n "checking for _FILE_OFFSET_BITS value needed for large files... " >&6; } +if ${ac_cv_sys_file_offset_bits+:} false; then : + $as_echo_n "(cached) " >&6 +else + while :; do + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + /* Check that off_t can represent 2**63 - 1 correctly. + We can't simply define LARGE_OFF_T to be 9223372036854775807, + since some C++ compilers masquerading as C compilers + incorrectly reject 9223372036854775807. */ +#define LARGE_OFF_T ((((off_t) 1 << 31) << 31) - 1 + (((off_t) 1 << 31) << 31)) + int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 + && LARGE_OFF_T % 2147483647 == 1) + ? 1 : -1]; +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_sys_file_offset_bits=no; break +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#define _FILE_OFFSET_BITS 64 +#include + /* Check that off_t can represent 2**63 - 1 correctly. 
+ We can't simply define LARGE_OFF_T to be 9223372036854775807, + since some C++ compilers masquerading as C compilers + incorrectly reject 9223372036854775807. */ +#define LARGE_OFF_T ((((off_t) 1 << 31) << 31) - 1 + (((off_t) 1 << 31) << 31)) + int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 + && LARGE_OFF_T % 2147483647 == 1) + ? 1 : -1]; +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_sys_file_offset_bits=64; break +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_cv_sys_file_offset_bits=unknown + break +done +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_file_offset_bits" >&5 +$as_echo "$ac_cv_sys_file_offset_bits" >&6; } +case $ac_cv_sys_file_offset_bits in #( + no | unknown) ;; + *) +cat >>confdefs.h <<_ACEOF +#define _FILE_OFFSET_BITS $ac_cv_sys_file_offset_bits +_ACEOF +;; +esac +rm -rf conftest* + if test $ac_cv_sys_file_offset_bits = unknown; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _LARGE_FILES value needed for large files" >&5 +$as_echo_n "checking for _LARGE_FILES value needed for large files... " >&6; } +if ${ac_cv_sys_large_files+:} false; then : + $as_echo_n "(cached) " >&6 +else + while :; do + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + /* Check that off_t can represent 2**63 - 1 correctly. + We can't simply define LARGE_OFF_T to be 9223372036854775807, + since some C++ compilers masquerading as C compilers + incorrectly reject 9223372036854775807. */ +#define LARGE_OFF_T ((((off_t) 1 << 31) << 31) - 1 + (((off_t) 1 << 31) << 31)) + int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 + && LARGE_OFF_T % 2147483647 == 1) + ? 1 : -1]; +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_sys_large_files=no; break +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +#define _LARGE_FILES 1 +#include + /* Check that off_t can represent 2**63 - 1 correctly. + We can't simply define LARGE_OFF_T to be 9223372036854775807, + since some C++ compilers masquerading as C compilers + incorrectly reject 9223372036854775807. */ +#define LARGE_OFF_T ((((off_t) 1 << 31) << 31) - 1 + (((off_t) 1 << 31) << 31)) + int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 + && LARGE_OFF_T % 2147483647 == 1) + ? 1 : -1]; +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_sys_large_files=1; break +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_cv_sys_large_files=unknown + break +done +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_large_files" >&5 +$as_echo "$ac_cv_sys_large_files" >&6; } +case $ac_cv_sys_large_files in #( + no | unknown) ;; + *) +cat >>confdefs.h <<_ACEOF +#define _LARGE_FILES $ac_cv_sys_large_files +_ACEOF +;; +esac +rm -rf conftest* + fi + + +fi + + +# Configure parameters + +# Check whether --with-openssl was given. +if test "${with_openssl+set}" = set; then : + withval=$with_openssl; use_openssl=$withval +else + use_openssl=auto +fi + + +# Check whether --with-libgcrypt was given. +if test "${with_libgcrypt+set}" = set; then : + withval=$with_libgcrypt; use_libgcrypt=$withval +else + use_libgcrypt=auto +fi + + +# Check whether --with-wincng was given. +if test "${with_wincng+set}" = set; then : + withval=$with_wincng; use_wincng=$withval +else + use_wincng=auto +fi + + +# Check whether --with-libz was given. 
+if test "${with_libz+set}" = set; then : + withval=$with_libz; use_libz=$withval +else + use_libz=auto +fi + + +found_crypto=none +support_clear_memory=no + +# Look for OpenSSL +if test "$found_crypto" = "none" && test "$use_openssl" != "no"; then + + if test "X$prefix" = "XNONE"; then + acl_final_prefix="$ac_default_prefix" + else + acl_final_prefix="$prefix" + fi + if test "X$exec_prefix" = "XNONE"; then + acl_final_exec_prefix='${prefix}' + else + acl_final_exec_prefix="$exec_prefix" + fi + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + eval acl_final_exec_prefix=\"$acl_final_exec_prefix\" + prefix="$acl_save_prefix" + + +# Check whether --with-gnu-ld was given. +if test "${with_gnu_ld+set}" = set; then : + withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes +else + with_gnu_ld=no +fi + +# Prepare PATH_SEPARATOR. +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + echo "#! /bin/sh" >conf$$.sh + echo "exit 0" >>conf$$.sh + chmod +x conf$$.sh + if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then + PATH_SEPARATOR=';' + else + PATH_SEPARATOR=: + fi + rm -f conf$$.sh +fi +ac_prog=ld +if test "$GCC" = yes; then + # Check if gcc -print-prog-name=ld gives a path. + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by GCC" >&5 +$as_echo_n "checking for ld used by GCC... " >&6; } + case $host in + *-*-mingw*) + # gcc leaves a trailing carriage return which upsets mingw + ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; + *) + ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; + esac + case $ac_prog in + # Accept absolute paths. + [\\/]* | [A-Za-z]:[\\/]*) + re_direlt='/[^/][^/]*/\.\./' + # Canonicalize the path of ld + ac_prog=`echo $ac_prog| sed 's%\\\\%/%g'` + while echo $ac_prog | grep "$re_direlt" > /dev/null 2>&1; do + ac_prog=`echo $ac_prog| sed "s%$re_direlt%/%"` + done + test -z "$LD" && LD="$ac_prog" + ;; + "") + # If it fails, then pretend we aren't using GCC. 
+ ac_prog=ld + ;; + *) + # If it is relative, then search for the first ld in PATH. + with_gnu_ld=unknown + ;; + esac +elif test "$with_gnu_ld" = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 +$as_echo_n "checking for GNU ld... " >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 +$as_echo_n "checking for non-GNU ld... " >&6; } +fi +if ${acl_cv_path_LD+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -z "$LD"; then + IFS="${IFS= }"; ac_save_ifs="$IFS"; IFS="${IFS}${PATH_SEPARATOR-:}" + for ac_dir in $PATH; do + test -z "$ac_dir" && ac_dir=. + if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then + acl_cv_path_LD="$ac_dir/$ac_prog" + # Check to see if the program is GNU ld. I'd rather use --version, + # but apparently some GNU ld's only accept -v. + # Break only if it was the GNU/non-GNU ld that we prefer. + case `"$acl_cv_path_LD" -v 2>&1 < /dev/null` in + *GNU* | *'with BFD'*) + test "$with_gnu_ld" != no && break ;; + *) + test "$with_gnu_ld" != yes && break ;; + esac + fi + done + IFS="$ac_save_ifs" +else + acl_cv_path_LD="$LD" # Let the user override the test with a path. +fi +fi + +LD="$acl_cv_path_LD" +if test -n "$LD"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5 +$as_echo "$LD" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi +test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 +$as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } +if ${acl_cv_prog_gnu_ld+:} false; then : + $as_echo_n "(cached) " >&6 +else + # I'd rather use --version here, but apparently some GNU ld's only accept -v. 
+case `$LD -v 2>&1 &5 +$as_echo "$acl_cv_prog_gnu_ld" >&6; } +with_gnu_ld=$acl_cv_prog_gnu_ld + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shared library run path origin" >&5 +$as_echo_n "checking for shared library run path origin... " >&6; } +if ${acl_cv_rpath+:} false; then : + $as_echo_n "(cached) " >&6 +else + + CC="$CC" GCC="$GCC" LDFLAGS="$LDFLAGS" LD="$LD" with_gnu_ld="$with_gnu_ld" \ + ${CONFIG_SHELL-/bin/sh} "$ac_aux_dir/config.rpath" "$host" > conftest.sh + . ./conftest.sh + rm -f ./conftest.sh + acl_cv_rpath=done + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $acl_cv_rpath" >&5 +$as_echo "$acl_cv_rpath" >&6; } + wl="$acl_cv_wl" + acl_libext="$acl_cv_libext" + acl_shlibext="$acl_cv_shlibext" + acl_libname_spec="$acl_cv_libname_spec" + acl_library_names_spec="$acl_cv_library_names_spec" + acl_hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" + acl_hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" + acl_hardcode_direct="$acl_cv_hardcode_direct" + acl_hardcode_minus_L="$acl_cv_hardcode_minus_L" + # Check whether --enable-rpath was given. 
+if test "${enable_rpath+set}" = set; then : + enableval=$enable_rpath; : +else + enable_rpath=yes +fi + + + + acl_libdirstem=lib + searchpath=`(LC_ALL=C $CC -print-search-dirs) 2>/dev/null | sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'` + if test -n "$searchpath"; then + acl_save_IFS="${IFS= }"; IFS=":" + for searchdir in $searchpath; do + if test -d "$searchdir"; then + case "$searchdir" in + */lib64/ | */lib64 ) acl_libdirstem=lib64 ;; + *) searchdir=`cd "$searchdir" && pwd` + case "$searchdir" in + */lib64 ) acl_libdirstem=lib64 ;; + esac ;; + esac + fi + done + IFS="$acl_save_IFS" + fi + + + + + + + + + + + + use_additional=yes + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + + eval additional_includedir=\"$includedir\" + eval additional_libdir=\"$libdir\" + + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + +# Check whether --with-libssl-prefix was given. +if test "${with_libssl_prefix+set}" = set; then : + withval=$with_libssl_prefix; + if test "X$withval" = "Xno"; then + use_additional=no + else + if test "X$withval" = "X"; then + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + + eval additional_includedir=\"$includedir\" + eval additional_libdir=\"$libdir\" + + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + else + additional_includedir="$withval/include" + additional_libdir="$withval/$acl_libdirstem" + fi + fi + +fi + + LIBSSL= + LTLIBSSL= + INCSSL= + LIBSSL_PREFIX= + rpathdirs= + ltrpathdirs= + names_already_handled= + names_next_round='ssl crypto' + while test -n "$names_next_round"; do + names_this_round="$names_next_round" + names_next_round= + for name in $names_this_round; do + already_handled= + for n in $names_already_handled; do + if test "$n" = "$name"; then + already_handled=yes + break + fi + done + if test -z 
"$already_handled"; then + names_already_handled="$names_already_handled $name" + uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` + eval value=\"\$HAVE_LIB$uppername\" + if test -n "$value"; then + if test "$value" = yes; then + eval value=\"\$LIB$uppername\" + test -z "$value" || LIBSSL="${LIBSSL}${LIBSSL:+ }$value" + eval value=\"\$LTLIB$uppername\" + test -z "$value" || LTLIBSSL="${LTLIBSSL}${LTLIBSSL:+ }$value" + else + : + fi + else + found_dir= + found_la= + found_so= + found_a= + eval libname=\"$acl_libname_spec\" # typically: libname=lib$name + if test -n "$acl_shlibext"; then + shrext=".$acl_shlibext" # typically: shrext=.so + else + shrext= + fi + if test $use_additional = yes; then + dir="$additional_libdir" + if test -n "$acl_shlibext"; then + if test -f "$dir/$libname$shrext"; then + found_dir="$dir" + found_so="$dir/$libname$shrext" + else + if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then + ver=`(cd "$dir" && \ + for f in "$libname$shrext".*; do echo "$f"; done \ + | sed -e "s,^$libname$shrext\\\\.,," \ + | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ + | sed 1q ) 2>/dev/null` + if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then + found_dir="$dir" + found_so="$dir/$libname$shrext.$ver" + fi + else + eval library_names=\"$acl_library_names_spec\" + for f in $library_names; do + if test -f "$dir/$f"; then + found_dir="$dir" + found_so="$dir/$f" + break + fi + done + fi + fi + fi + if test "X$found_dir" = "X"; then + if test -f "$dir/$libname.$acl_libext"; then + found_dir="$dir" + found_a="$dir/$libname.$acl_libext" + fi + fi + if test "X$found_dir" != "X"; then + if test -f "$dir/$libname.la"; then + found_la="$dir/$libname.la" + fi + fi + fi + if test "X$found_dir" = "X"; then + for x in $LDFLAGS $LTLIBSSL; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + case "$x" in + -L*) + dir=`echo "X$x" | sed -e 's/^X-L//'` + if test -n "$acl_shlibext"; then + if test -f "$dir/$libname$shrext"; then + found_dir="$dir" + found_so="$dir/$libname$shrext" + else + if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then + ver=`(cd "$dir" && \ + for f in "$libname$shrext".*; do echo "$f"; done \ + | sed -e "s,^$libname$shrext\\\\.,," \ + | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ + | sed 1q ) 2>/dev/null` + if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then + found_dir="$dir" + found_so="$dir/$libname$shrext.$ver" + fi + else + eval library_names=\"$acl_library_names_spec\" + for f in $library_names; do + if test -f "$dir/$f"; then + found_dir="$dir" + found_so="$dir/$f" + break + fi + done + fi + fi + fi + if test "X$found_dir" = "X"; then + if test -f "$dir/$libname.$acl_libext"; then + found_dir="$dir" + found_a="$dir/$libname.$acl_libext" + fi + fi + if test "X$found_dir" != "X"; then + if test -f "$dir/$libname.la"; then + found_la="$dir/$libname.la" + fi + fi + ;; + esac + if test "X$found_dir" != "X"; then + break + fi + done + fi + if test "X$found_dir" != "X"; then + LTLIBSSL="${LTLIBSSL}${LTLIBSSL:+ }-L$found_dir -l$name" + if test "X$found_so" != "X"; then + if test "$enable_rpath" = no || test "X$found_dir" = "X/usr/$acl_libdirstem"; then + LIBSSL="${LIBSSL}${LIBSSL:+ }$found_so" + else + haveit= + for x in $ltrpathdirs; do + if test "X$x" = "X$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + ltrpathdirs="$ltrpathdirs $found_dir" + fi + if test "$acl_hardcode_direct" = yes; then + LIBSSL="${LIBSSL}${LIBSSL:+ }$found_so" + else + if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then + LIBSSL="${LIBSSL}${LIBSSL:+ }$found_so" + haveit= + for x in $rpathdirs; do + if test "X$x" = "X$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + rpathdirs="$rpathdirs $found_dir" + fi + else + haveit= + for x in $LDFLAGS $LIBSSL; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + LIBSSL="${LIBSSL}${LIBSSL:+ }-L$found_dir" + fi + if 
test "$acl_hardcode_minus_L" != no; then + LIBSSL="${LIBSSL}${LIBSSL:+ }$found_so" + else + LIBSSL="${LIBSSL}${LIBSSL:+ }-l$name" + fi + fi + fi + fi + else + if test "X$found_a" != "X"; then + LIBSSL="${LIBSSL}${LIBSSL:+ }$found_a" + else + LIBSSL="${LIBSSL}${LIBSSL:+ }-L$found_dir -l$name" + fi + fi + additional_includedir= + case "$found_dir" in + */$acl_libdirstem | */$acl_libdirstem/) + basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` + LIBSSL_PREFIX="$basedir" + additional_includedir="$basedir/include" + ;; + esac + if test "X$additional_includedir" != "X"; then + if test "X$additional_includedir" != "X/usr/include"; then + haveit= + if test "X$additional_includedir" = "X/usr/local/include"; then + if test -n "$GCC"; then + case $host_os in + linux* | gnu* | k*bsd*-gnu) haveit=yes;; + esac + fi + fi + if test -z "$haveit"; then + for x in $CPPFLAGS $INCSSL; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-I$additional_includedir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_includedir"; then + INCSSL="${INCSSL}${INCSSL:+ }-I$additional_includedir" + fi + fi + fi + fi + fi + if test -n "$found_la"; then + save_libdir="$libdir" + case "$found_la" in + */* | *\\*) . "$found_la" ;; + *) . 
"./$found_la" ;; + esac + libdir="$save_libdir" + for dep in $dependency_libs; do + case "$dep" in + -L*) + additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` + if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then + haveit= + if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then + if test -n "$GCC"; then + case $host_os in + linux* | gnu* | k*bsd*-gnu) haveit=yes;; + esac + fi + fi + if test -z "$haveit"; then + haveit= + for x in $LDFLAGS $LIBSSL; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$additional_libdir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_libdir"; then + LIBSSL="${LIBSSL}${LIBSSL:+ }-L$additional_libdir" + fi + fi + haveit= + for x in $LDFLAGS $LTLIBSSL; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$additional_libdir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_libdir"; then + LTLIBSSL="${LTLIBSSL}${LTLIBSSL:+ }-L$additional_libdir" + fi + fi + fi + fi + ;; + -R*) + dir=`echo "X$dep" | sed -e 's/^X-R//'` + if test "$enable_rpath" != no; then + haveit= + for x in $rpathdirs; do + if test "X$x" = "X$dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + rpathdirs="$rpathdirs $dir" + fi + haveit= + for x in $ltrpathdirs; do + if test "X$x" = "X$dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + ltrpathdirs="$ltrpathdirs $dir" + fi + fi + ;; + -l*) + names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` + ;; + *.la) + names_next_round="$names_next_round "`echo "X$dep" | sed 
-e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` + ;; + *) + LIBSSL="${LIBSSL}${LIBSSL:+ }$dep" + LTLIBSSL="${LTLIBSSL}${LTLIBSSL:+ }$dep" + ;; + esac + done + fi + else + LIBSSL="${LIBSSL}${LIBSSL:+ }-l$name" + LTLIBSSL="${LTLIBSSL}${LTLIBSSL:+ }-l$name" + fi + fi + fi + done + done + if test "X$rpathdirs" != "X"; then + if test -n "$acl_hardcode_libdir_separator"; then + alldirs= + for found_dir in $rpathdirs; do + alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" + done + acl_save_libdir="$libdir" + libdir="$alldirs" + eval flag=\"$acl_hardcode_libdir_flag_spec\" + libdir="$acl_save_libdir" + LIBSSL="${LIBSSL}${LIBSSL:+ }$flag" + else + for found_dir in $rpathdirs; do + acl_save_libdir="$libdir" + libdir="$found_dir" + eval flag=\"$acl_hardcode_libdir_flag_spec\" + libdir="$acl_save_libdir" + LIBSSL="${LIBSSL}${LIBSSL:+ }$flag" + done + fi + fi + if test "X$ltrpathdirs" != "X"; then + for found_dir in $ltrpathdirs; do + LTLIBSSL="${LTLIBSSL}${LTLIBSSL:+ }-R$found_dir" + done + fi + + + ac_save_CPPFLAGS="$CPPFLAGS" + + for element in $INCSSL; do + haveit= + for x in $CPPFLAGS; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X$element"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" + fi + done + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libssl" >&5 +$as_echo_n "checking for libssl... " >&6; } +if ${ac_cv_libssl+:} false; then : + $as_echo_n "(cached) " >&6 +else + + ac_save_LIBS="$LIBS" + LIBS="$LIBS $LIBSSL" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +#include +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_libssl=yes +else + ac_cv_libssl=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LIBS="$ac_save_LIBS" + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_libssl" >&5 +$as_echo "$ac_cv_libssl" >&6; } + if test "$ac_cv_libssl" = yes; then + HAVE_LIBSSL=yes + +$as_echo "#define HAVE_LIBSSL 1" >>confdefs.h + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libssl" >&5 +$as_echo_n "checking how to link with libssl... " >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBSSL" >&5 +$as_echo "$LIBSSL" >&6; } + else + HAVE_LIBSSL=no + CPPFLAGS="$ac_save_CPPFLAGS" + LIBSSL= + LTLIBSSL= + LIBSSL_PREFIX= + fi + + + + + + + +fi +if test "$ac_cv_libssl" = "yes"; then + +$as_echo "#define LIBSSH2_OPENSSL 1" >>confdefs.h + + LIBSREQUIRED=libssl,libcrypto + + # Not all OpenSSL have AES-CTR functions. + save_LIBS="$LIBS" + LIBS="$LIBS $LIBSSL" + for ac_func in EVP_aes_128_ctr +do : + ac_fn_c_check_func "$LINENO" "EVP_aes_128_ctr" "ac_cv_func_EVP_aes_128_ctr" +if test "x$ac_cv_func_EVP_aes_128_ctr" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_EVP_AES_128_CTR 1 +_ACEOF + +fi +done + + LIBS="$save_LIBS" + + found_crypto="OpenSSL (AES-CTR: ${ac_cv_func_EVP_aes_128_ctr:-N/A})" +fi + if test "$ac_cv_libssl" = "yes"; then + OPENSSL_TRUE= + OPENSSL_FALSE='#' +else + OPENSSL_TRUE='#' + OPENSSL_FALSE= +fi + + +# Look for libgcrypt +if test "$found_crypto" = "none" && test "$use_libgcrypt" != "no"; then + + + + + + + + + + + use_additional=yes + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + + eval additional_includedir=\"$includedir\" + eval additional_libdir=\"$libdir\" + + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + +# Check whether --with-libgcrypt-prefix was given. 
+if test "${with_libgcrypt_prefix+set}" = set; then : + withval=$with_libgcrypt_prefix; + if test "X$withval" = "Xno"; then + use_additional=no + else + if test "X$withval" = "X"; then + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + + eval additional_includedir=\"$includedir\" + eval additional_libdir=\"$libdir\" + + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + else + additional_includedir="$withval/include" + additional_libdir="$withval/$acl_libdirstem" + fi + fi + +fi + + LIBGCRYPT= + LTLIBGCRYPT= + INCGCRYPT= + LIBGCRYPT_PREFIX= + rpathdirs= + ltrpathdirs= + names_already_handled= + names_next_round='gcrypt ' + while test -n "$names_next_round"; do + names_this_round="$names_next_round" + names_next_round= + for name in $names_this_round; do + already_handled= + for n in $names_already_handled; do + if test "$n" = "$name"; then + already_handled=yes + break + fi + done + if test -z "$already_handled"; then + names_already_handled="$names_already_handled $name" + uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` + eval value=\"\$HAVE_LIB$uppername\" + if test -n "$value"; then + if test "$value" = yes; then + eval value=\"\$LIB$uppername\" + test -z "$value" || LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }$value" + eval value=\"\$LTLIB$uppername\" + test -z "$value" || LTLIBGCRYPT="${LTLIBGCRYPT}${LTLIBGCRYPT:+ }$value" + else + : + fi + else + found_dir= + found_la= + found_so= + found_a= + eval libname=\"$acl_libname_spec\" # typically: libname=lib$name + if test -n "$acl_shlibext"; then + shrext=".$acl_shlibext" # typically: shrext=.so + else + shrext= + fi + if test $use_additional = yes; then + dir="$additional_libdir" + if test -n "$acl_shlibext"; then + if test -f "$dir/$libname$shrext"; then + found_dir="$dir" + found_so="$dir/$libname$shrext" + else + if test "$acl_library_names_spec" = 
'$libname$shrext$versuffix'; then + ver=`(cd "$dir" && \ + for f in "$libname$shrext".*; do echo "$f"; done \ + | sed -e "s,^$libname$shrext\\\\.,," \ + | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ + | sed 1q ) 2>/dev/null` + if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then + found_dir="$dir" + found_so="$dir/$libname$shrext.$ver" + fi + else + eval library_names=\"$acl_library_names_spec\" + for f in $library_names; do + if test -f "$dir/$f"; then + found_dir="$dir" + found_so="$dir/$f" + break + fi + done + fi + fi + fi + if test "X$found_dir" = "X"; then + if test -f "$dir/$libname.$acl_libext"; then + found_dir="$dir" + found_a="$dir/$libname.$acl_libext" + fi + fi + if test "X$found_dir" != "X"; then + if test -f "$dir/$libname.la"; then + found_la="$dir/$libname.la" + fi + fi + fi + if test "X$found_dir" = "X"; then + for x in $LDFLAGS $LTLIBGCRYPT; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + case "$x" in + -L*) + dir=`echo "X$x" | sed -e 's/^X-L//'` + if test -n "$acl_shlibext"; then + if test -f "$dir/$libname$shrext"; then + found_dir="$dir" + found_so="$dir/$libname$shrext" + else + if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then + ver=`(cd "$dir" && \ + for f in "$libname$shrext".*; do echo "$f"; done \ + | sed -e "s,^$libname$shrext\\\\.,," \ + | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ + | sed 1q ) 2>/dev/null` + if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then + found_dir="$dir" + found_so="$dir/$libname$shrext.$ver" + fi + else + eval library_names=\"$acl_library_names_spec\" + for f in $library_names; do + if test -f "$dir/$f"; then + found_dir="$dir" + found_so="$dir/$f" + break + fi + done + fi + fi + fi + if test "X$found_dir" = "X"; then + if test -f "$dir/$libname.$acl_libext"; then + found_dir="$dir" + found_a="$dir/$libname.$acl_libext" + fi + fi + if test "X$found_dir" != "X"; then + if test -f "$dir/$libname.la"; then + found_la="$dir/$libname.la" + fi + fi + ;; + esac + if test "X$found_dir" != "X"; then + break + fi + done + fi + if test "X$found_dir" != "X"; then + LTLIBGCRYPT="${LTLIBGCRYPT}${LTLIBGCRYPT:+ }-L$found_dir -l$name" + if test "X$found_so" != "X"; then + if test "$enable_rpath" = no || test "X$found_dir" = "X/usr/$acl_libdirstem"; then + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }$found_so" + else + haveit= + for x in $ltrpathdirs; do + if test "X$x" = "X$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + ltrpathdirs="$ltrpathdirs $found_dir" + fi + if test "$acl_hardcode_direct" = yes; then + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }$found_so" + else + if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }$found_so" + haveit= + for x in $rpathdirs; do + if test "X$x" = "X$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + rpathdirs="$rpathdirs $found_dir" + fi + else + haveit= + for x in $LDFLAGS $LIBGCRYPT; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + 
LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }-L$found_dir" + fi + if test "$acl_hardcode_minus_L" != no; then + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }$found_so" + else + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }-l$name" + fi + fi + fi + fi + else + if test "X$found_a" != "X"; then + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }$found_a" + else + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }-L$found_dir -l$name" + fi + fi + additional_includedir= + case "$found_dir" in + */$acl_libdirstem | */$acl_libdirstem/) + basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` + LIBGCRYPT_PREFIX="$basedir" + additional_includedir="$basedir/include" + ;; + esac + if test "X$additional_includedir" != "X"; then + if test "X$additional_includedir" != "X/usr/include"; then + haveit= + if test "X$additional_includedir" = "X/usr/local/include"; then + if test -n "$GCC"; then + case $host_os in + linux* | gnu* | k*bsd*-gnu) haveit=yes;; + esac + fi + fi + if test -z "$haveit"; then + for x in $CPPFLAGS $INCGCRYPT; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-I$additional_includedir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_includedir"; then + INCGCRYPT="${INCGCRYPT}${INCGCRYPT:+ }-I$additional_includedir" + fi + fi + fi + fi + fi + if test -n "$found_la"; then + save_libdir="$libdir" + case "$found_la" in + */* | *\\*) . "$found_la" ;; + *) . 
"./$found_la" ;; + esac + libdir="$save_libdir" + for dep in $dependency_libs; do + case "$dep" in + -L*) + additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` + if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then + haveit= + if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then + if test -n "$GCC"; then + case $host_os in + linux* | gnu* | k*bsd*-gnu) haveit=yes;; + esac + fi + fi + if test -z "$haveit"; then + haveit= + for x in $LDFLAGS $LIBGCRYPT; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$additional_libdir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_libdir"; then + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }-L$additional_libdir" + fi + fi + haveit= + for x in $LDFLAGS $LTLIBGCRYPT; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$additional_libdir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_libdir"; then + LTLIBGCRYPT="${LTLIBGCRYPT}${LTLIBGCRYPT:+ }-L$additional_libdir" + fi + fi + fi + fi + ;; + -R*) + dir=`echo "X$dep" | sed -e 's/^X-R//'` + if test "$enable_rpath" != no; then + haveit= + for x in $rpathdirs; do + if test "X$x" = "X$dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + rpathdirs="$rpathdirs $dir" + fi + haveit= + for x in $ltrpathdirs; do + if test "X$x" = "X$dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + ltrpathdirs="$ltrpathdirs $dir" + fi + fi + ;; + -l*) + names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` + ;; + *.la) + 
names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` + ;; + *) + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }$dep" + LTLIBGCRYPT="${LTLIBGCRYPT}${LTLIBGCRYPT:+ }$dep" + ;; + esac + done + fi + else + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }-l$name" + LTLIBGCRYPT="${LTLIBGCRYPT}${LTLIBGCRYPT:+ }-l$name" + fi + fi + fi + done + done + if test "X$rpathdirs" != "X"; then + if test -n "$acl_hardcode_libdir_separator"; then + alldirs= + for found_dir in $rpathdirs; do + alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" + done + acl_save_libdir="$libdir" + libdir="$alldirs" + eval flag=\"$acl_hardcode_libdir_flag_spec\" + libdir="$acl_save_libdir" + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }$flag" + else + for found_dir in $rpathdirs; do + acl_save_libdir="$libdir" + libdir="$found_dir" + eval flag=\"$acl_hardcode_libdir_flag_spec\" + libdir="$acl_save_libdir" + LIBGCRYPT="${LIBGCRYPT}${LIBGCRYPT:+ }$flag" + done + fi + fi + if test "X$ltrpathdirs" != "X"; then + for found_dir in $ltrpathdirs; do + LTLIBGCRYPT="${LTLIBGCRYPT}${LTLIBGCRYPT:+ }-R$found_dir" + done + fi + + + ac_save_CPPFLAGS="$CPPFLAGS" + + for element in $INCGCRYPT; do + haveit= + for x in $CPPFLAGS; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X$element"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" + fi + done + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libgcrypt" >&5 +$as_echo_n "checking for libgcrypt... " >&6; } +if ${ac_cv_libgcrypt+:} false; then : + $as_echo_n "(cached) " >&6 +else + + ac_save_LIBS="$LIBS" + LIBS="$LIBS $LIBGCRYPT" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +#include +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_libgcrypt=yes +else + ac_cv_libgcrypt=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LIBS="$ac_save_LIBS" + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_libgcrypt" >&5 +$as_echo "$ac_cv_libgcrypt" >&6; } + if test "$ac_cv_libgcrypt" = yes; then + HAVE_LIBGCRYPT=yes + +$as_echo "#define HAVE_LIBGCRYPT 1" >>confdefs.h + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libgcrypt" >&5 +$as_echo_n "checking how to link with libgcrypt... " >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBGCRYPT" >&5 +$as_echo "$LIBGCRYPT" >&6; } + else + HAVE_LIBGCRYPT=no + CPPFLAGS="$ac_save_CPPFLAGS" + LIBGCRYPT= + LTLIBGCRYPT= + LIBGCRYPT_PREFIX= + fi + + + + + + + +fi +if test "$ac_cv_libgcrypt" = "yes"; then + +$as_echo "#define LIBSSH2_LIBGCRYPT 1" >>confdefs.h + + LIBSREQUIRED= # libgcrypt doesn't provide a .pc file. sad face. + LIBS="$LIBS -lgcrypt" + found_crypto=libgcrypt +fi + if test "$ac_cv_libgcrypt" = "yes"; then + LIBGCRYPT_TRUE= + LIBGCRYPT_FALSE='#' +else + LIBGCRYPT_TRUE='#' + LIBGCRYPT_FALSE= +fi + + +# Look for Windows Cryptography API: Next Generation +if test "$found_crypto" = "none" && test "$use_wincng" != "no"; then + + + + + + + + + + + use_additional=yes + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + + eval additional_includedir=\"$includedir\" + eval additional_libdir=\"$libdir\" + + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + +# Check whether --with-libbcrypt-prefix was given. 
+if test "${with_libbcrypt_prefix+set}" = set; then : + withval=$with_libbcrypt_prefix; + if test "X$withval" = "Xno"; then + use_additional=no + else + if test "X$withval" = "X"; then + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + + eval additional_includedir=\"$includedir\" + eval additional_libdir=\"$libdir\" + + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + else + additional_includedir="$withval/include" + additional_libdir="$withval/$acl_libdirstem" + fi + fi + +fi + + LIBBCRYPT= + LTLIBBCRYPT= + INCBCRYPT= + LIBBCRYPT_PREFIX= + rpathdirs= + ltrpathdirs= + names_already_handled= + names_next_round='bcrypt ' + while test -n "$names_next_round"; do + names_this_round="$names_next_round" + names_next_round= + for name in $names_this_round; do + already_handled= + for n in $names_already_handled; do + if test "$n" = "$name"; then + already_handled=yes + break + fi + done + if test -z "$already_handled"; then + names_already_handled="$names_already_handled $name" + uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` + eval value=\"\$HAVE_LIB$uppername\" + if test -n "$value"; then + if test "$value" = yes; then + eval value=\"\$LIB$uppername\" + test -z "$value" || LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }$value" + eval value=\"\$LTLIB$uppername\" + test -z "$value" || LTLIBBCRYPT="${LTLIBBCRYPT}${LTLIBBCRYPT:+ }$value" + else + : + fi + else + found_dir= + found_la= + found_so= + found_a= + eval libname=\"$acl_libname_spec\" # typically: libname=lib$name + if test -n "$acl_shlibext"; then + shrext=".$acl_shlibext" # typically: shrext=.so + else + shrext= + fi + if test $use_additional = yes; then + dir="$additional_libdir" + if test -n "$acl_shlibext"; then + if test -f "$dir/$libname$shrext"; then + found_dir="$dir" + found_so="$dir/$libname$shrext" + else + if test "$acl_library_names_spec" = 
'$libname$shrext$versuffix'; then + ver=`(cd "$dir" && \ + for f in "$libname$shrext".*; do echo "$f"; done \ + | sed -e "s,^$libname$shrext\\\\.,," \ + | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ + | sed 1q ) 2>/dev/null` + if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then + found_dir="$dir" + found_so="$dir/$libname$shrext.$ver" + fi + else + eval library_names=\"$acl_library_names_spec\" + for f in $library_names; do + if test -f "$dir/$f"; then + found_dir="$dir" + found_so="$dir/$f" + break + fi + done + fi + fi + fi + if test "X$found_dir" = "X"; then + if test -f "$dir/$libname.$acl_libext"; then + found_dir="$dir" + found_a="$dir/$libname.$acl_libext" + fi + fi + if test "X$found_dir" != "X"; then + if test -f "$dir/$libname.la"; then + found_la="$dir/$libname.la" + fi + fi + fi + if test "X$found_dir" = "X"; then + for x in $LDFLAGS $LTLIBBCRYPT; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + case "$x" in + -L*) + dir=`echo "X$x" | sed -e 's/^X-L//'` + if test -n "$acl_shlibext"; then + if test -f "$dir/$libname$shrext"; then + found_dir="$dir" + found_so="$dir/$libname$shrext" + else + if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then + ver=`(cd "$dir" && \ + for f in "$libname$shrext".*; do echo "$f"; done \ + | sed -e "s,^$libname$shrext\\\\.,," \ + | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ + | sed 1q ) 2>/dev/null` + if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then + found_dir="$dir" + found_so="$dir/$libname$shrext.$ver" + fi + else + eval library_names=\"$acl_library_names_spec\" + for f in $library_names; do + if test -f "$dir/$f"; then + found_dir="$dir" + found_so="$dir/$f" + break + fi + done + fi + fi + fi + if test "X$found_dir" = "X"; then + if test -f "$dir/$libname.$acl_libext"; then + found_dir="$dir" + found_a="$dir/$libname.$acl_libext" + fi + fi + if test "X$found_dir" != "X"; then + if test -f "$dir/$libname.la"; then + found_la="$dir/$libname.la" + fi + fi + ;; + esac + if test "X$found_dir" != "X"; then + break + fi + done + fi + if test "X$found_dir" != "X"; then + LTLIBBCRYPT="${LTLIBBCRYPT}${LTLIBBCRYPT:+ }-L$found_dir -l$name" + if test "X$found_so" != "X"; then + if test "$enable_rpath" = no || test "X$found_dir" = "X/usr/$acl_libdirstem"; then + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }$found_so" + else + haveit= + for x in $ltrpathdirs; do + if test "X$x" = "X$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + ltrpathdirs="$ltrpathdirs $found_dir" + fi + if test "$acl_hardcode_direct" = yes; then + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }$found_so" + else + if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }$found_so" + haveit= + for x in $rpathdirs; do + if test "X$x" = "X$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + rpathdirs="$rpathdirs $found_dir" + fi + else + haveit= + for x in $LDFLAGS $LIBBCRYPT; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + 
LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }-L$found_dir" + fi + if test "$acl_hardcode_minus_L" != no; then + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }$found_so" + else + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }-l$name" + fi + fi + fi + fi + else + if test "X$found_a" != "X"; then + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }$found_a" + else + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }-L$found_dir -l$name" + fi + fi + additional_includedir= + case "$found_dir" in + */$acl_libdirstem | */$acl_libdirstem/) + basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` + LIBBCRYPT_PREFIX="$basedir" + additional_includedir="$basedir/include" + ;; + esac + if test "X$additional_includedir" != "X"; then + if test "X$additional_includedir" != "X/usr/include"; then + haveit= + if test "X$additional_includedir" = "X/usr/local/include"; then + if test -n "$GCC"; then + case $host_os in + linux* | gnu* | k*bsd*-gnu) haveit=yes;; + esac + fi + fi + if test -z "$haveit"; then + for x in $CPPFLAGS $INCBCRYPT; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-I$additional_includedir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_includedir"; then + INCBCRYPT="${INCBCRYPT}${INCBCRYPT:+ }-I$additional_includedir" + fi + fi + fi + fi + fi + if test -n "$found_la"; then + save_libdir="$libdir" + case "$found_la" in + */* | *\\*) . "$found_la" ;; + *) . 
"./$found_la" ;; + esac + libdir="$save_libdir" + for dep in $dependency_libs; do + case "$dep" in + -L*) + additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` + if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then + haveit= + if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then + if test -n "$GCC"; then + case $host_os in + linux* | gnu* | k*bsd*-gnu) haveit=yes;; + esac + fi + fi + if test -z "$haveit"; then + haveit= + for x in $LDFLAGS $LIBBCRYPT; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$additional_libdir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_libdir"; then + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }-L$additional_libdir" + fi + fi + haveit= + for x in $LDFLAGS $LTLIBBCRYPT; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$additional_libdir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_libdir"; then + LTLIBBCRYPT="${LTLIBBCRYPT}${LTLIBBCRYPT:+ }-L$additional_libdir" + fi + fi + fi + fi + ;; + -R*) + dir=`echo "X$dep" | sed -e 's/^X-R//'` + if test "$enable_rpath" != no; then + haveit= + for x in $rpathdirs; do + if test "X$x" = "X$dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + rpathdirs="$rpathdirs $dir" + fi + haveit= + for x in $ltrpathdirs; do + if test "X$x" = "X$dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + ltrpathdirs="$ltrpathdirs $dir" + fi + fi + ;; + -l*) + names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` + ;; + *.la) + 
names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` + ;; + *) + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }$dep" + LTLIBBCRYPT="${LTLIBBCRYPT}${LTLIBBCRYPT:+ }$dep" + ;; + esac + done + fi + else + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }-l$name" + LTLIBBCRYPT="${LTLIBBCRYPT}${LTLIBBCRYPT:+ }-l$name" + fi + fi + fi + done + done + if test "X$rpathdirs" != "X"; then + if test -n "$acl_hardcode_libdir_separator"; then + alldirs= + for found_dir in $rpathdirs; do + alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" + done + acl_save_libdir="$libdir" + libdir="$alldirs" + eval flag=\"$acl_hardcode_libdir_flag_spec\" + libdir="$acl_save_libdir" + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }$flag" + else + for found_dir in $rpathdirs; do + acl_save_libdir="$libdir" + libdir="$found_dir" + eval flag=\"$acl_hardcode_libdir_flag_spec\" + libdir="$acl_save_libdir" + LIBBCRYPT="${LIBBCRYPT}${LIBBCRYPT:+ }$flag" + done + fi + fi + if test "X$ltrpathdirs" != "X"; then + for found_dir in $ltrpathdirs; do + LTLIBBCRYPT="${LTLIBBCRYPT}${LTLIBBCRYPT:+ }-R$found_dir" + done + fi + + + ac_save_CPPFLAGS="$CPPFLAGS" + + for element in $INCBCRYPT; do + haveit= + for x in $CPPFLAGS; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X$element"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" + fi + done + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libbcrypt" >&5 +$as_echo_n "checking for libbcrypt... " >&6; } +if ${ac_cv_libbcrypt+:} false; then : + $as_echo_n "(cached) " >&6 +else + + ac_save_LIBS="$LIBS" + LIBS="$LIBS $LIBBCRYPT" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + + #include + #include + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_libbcrypt=yes +else + ac_cv_libbcrypt=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LIBS="$ac_save_LIBS" + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_libbcrypt" >&5 +$as_echo "$ac_cv_libbcrypt" >&6; } + if test "$ac_cv_libbcrypt" = yes; then + HAVE_LIBBCRYPT=yes + +$as_echo "#define HAVE_LIBBCRYPT 1" >>confdefs.h + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libbcrypt" >&5 +$as_echo_n "checking how to link with libbcrypt... " >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBBCRYPT" >&5 +$as_echo "$LIBBCRYPT" >&6; } + else + HAVE_LIBBCRYPT=no + CPPFLAGS="$ac_save_CPPFLAGS" + LIBBCRYPT= + LTLIBBCRYPT= + LIBBCRYPT_PREFIX= + fi + + + + + + + + + + + + + + + + + + use_additional=yes + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + + eval additional_includedir=\"$includedir\" + eval additional_libdir=\"$libdir\" + + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + +# Check whether --with-libcrypt32-prefix was given. 
+if test "${with_libcrypt32_prefix+set}" = set; then : + withval=$with_libcrypt32_prefix; + if test "X$withval" = "Xno"; then + use_additional=no + else + if test "X$withval" = "X"; then + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + + eval additional_includedir=\"$includedir\" + eval additional_libdir=\"$libdir\" + + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + else + additional_includedir="$withval/include" + additional_libdir="$withval/$acl_libdirstem" + fi + fi + +fi + + LIBCRYPT32= + LTLIBCRYPT32= + INCCRYPT32= + LIBCRYPT32_PREFIX= + rpathdirs= + ltrpathdirs= + names_already_handled= + names_next_round='crypt32 ' + while test -n "$names_next_round"; do + names_this_round="$names_next_round" + names_next_round= + for name in $names_this_round; do + already_handled= + for n in $names_already_handled; do + if test "$n" = "$name"; then + already_handled=yes + break + fi + done + if test -z "$already_handled"; then + names_already_handled="$names_already_handled $name" + uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` + eval value=\"\$HAVE_LIB$uppername\" + if test -n "$value"; then + if test "$value" = yes; then + eval value=\"\$LIB$uppername\" + test -z "$value" || LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }$value" + eval value=\"\$LTLIB$uppername\" + test -z "$value" || LTLIBCRYPT32="${LTLIBCRYPT32}${LTLIBCRYPT32:+ }$value" + else + : + fi + else + found_dir= + found_la= + found_so= + found_a= + eval libname=\"$acl_libname_spec\" # typically: libname=lib$name + if test -n "$acl_shlibext"; then + shrext=".$acl_shlibext" # typically: shrext=.so + else + shrext= + fi + if test $use_additional = yes; then + dir="$additional_libdir" + if test -n "$acl_shlibext"; then + if test -f "$dir/$libname$shrext"; then + found_dir="$dir" + found_so="$dir/$libname$shrext" + else + if test "$acl_library_names_spec" = 
'$libname$shrext$versuffix'; then + ver=`(cd "$dir" && \ + for f in "$libname$shrext".*; do echo "$f"; done \ + | sed -e "s,^$libname$shrext\\\\.,," \ + | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ + | sed 1q ) 2>/dev/null` + if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then + found_dir="$dir" + found_so="$dir/$libname$shrext.$ver" + fi + else + eval library_names=\"$acl_library_names_spec\" + for f in $library_names; do + if test -f "$dir/$f"; then + found_dir="$dir" + found_so="$dir/$f" + break + fi + done + fi + fi + fi + if test "X$found_dir" = "X"; then + if test -f "$dir/$libname.$acl_libext"; then + found_dir="$dir" + found_a="$dir/$libname.$acl_libext" + fi + fi + if test "X$found_dir" != "X"; then + if test -f "$dir/$libname.la"; then + found_la="$dir/$libname.la" + fi + fi + fi + if test "X$found_dir" = "X"; then + for x in $LDFLAGS $LTLIBCRYPT32; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + case "$x" in + -L*) + dir=`echo "X$x" | sed -e 's/^X-L//'` + if test -n "$acl_shlibext"; then + if test -f "$dir/$libname$shrext"; then + found_dir="$dir" + found_so="$dir/$libname$shrext" + else + if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then + ver=`(cd "$dir" && \ + for f in "$libname$shrext".*; do echo "$f"; done \ + | sed -e "s,^$libname$shrext\\\\.,," \ + | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ + | sed 1q ) 2>/dev/null` + if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then + found_dir="$dir" + found_so="$dir/$libname$shrext.$ver" + fi + else + eval library_names=\"$acl_library_names_spec\" + for f in $library_names; do + if test -f "$dir/$f"; then + found_dir="$dir" + found_so="$dir/$f" + break + fi + done + fi + fi + fi + if test "X$found_dir" = "X"; then + if test -f "$dir/$libname.$acl_libext"; then + found_dir="$dir" + found_a="$dir/$libname.$acl_libext" + fi + fi + if test "X$found_dir" != "X"; then + if test -f "$dir/$libname.la"; then + found_la="$dir/$libname.la" + fi + fi + ;; + esac + if test "X$found_dir" != "X"; then + break + fi + done + fi + if test "X$found_dir" != "X"; then + LTLIBCRYPT32="${LTLIBCRYPT32}${LTLIBCRYPT32:+ }-L$found_dir -l$name" + if test "X$found_so" != "X"; then + if test "$enable_rpath" = no || test "X$found_dir" = "X/usr/$acl_libdirstem"; then + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }$found_so" + else + haveit= + for x in $ltrpathdirs; do + if test "X$x" = "X$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + ltrpathdirs="$ltrpathdirs $found_dir" + fi + if test "$acl_hardcode_direct" = yes; then + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }$found_so" + else + if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }$found_so" + haveit= + for x in $rpathdirs; do + if test "X$x" = "X$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + rpathdirs="$rpathdirs $found_dir" + fi + else + haveit= + for x in $LDFLAGS $LIBCRYPT32; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + 
LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }-L$found_dir" + fi + if test "$acl_hardcode_minus_L" != no; then + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }$found_so" + else + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }-l$name" + fi + fi + fi + fi + else + if test "X$found_a" != "X"; then + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }$found_a" + else + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }-L$found_dir -l$name" + fi + fi + additional_includedir= + case "$found_dir" in + */$acl_libdirstem | */$acl_libdirstem/) + basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` + LIBCRYPT32_PREFIX="$basedir" + additional_includedir="$basedir/include" + ;; + esac + if test "X$additional_includedir" != "X"; then + if test "X$additional_includedir" != "X/usr/include"; then + haveit= + if test "X$additional_includedir" = "X/usr/local/include"; then + if test -n "$GCC"; then + case $host_os in + linux* | gnu* | k*bsd*-gnu) haveit=yes;; + esac + fi + fi + if test -z "$haveit"; then + for x in $CPPFLAGS $INCCRYPT32; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-I$additional_includedir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_includedir"; then + INCCRYPT32="${INCCRYPT32}${INCCRYPT32:+ }-I$additional_includedir" + fi + fi + fi + fi + fi + if test -n "$found_la"; then + save_libdir="$libdir" + case "$found_la" in + */* | *\\*) . "$found_la" ;; + *) . 
"./$found_la" ;; + esac + libdir="$save_libdir" + for dep in $dependency_libs; do + case "$dep" in + -L*) + additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` + if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then + haveit= + if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then + if test -n "$GCC"; then + case $host_os in + linux* | gnu* | k*bsd*-gnu) haveit=yes;; + esac + fi + fi + if test -z "$haveit"; then + haveit= + for x in $LDFLAGS $LIBCRYPT32; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$additional_libdir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_libdir"; then + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }-L$additional_libdir" + fi + fi + haveit= + for x in $LDFLAGS $LTLIBCRYPT32; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$additional_libdir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_libdir"; then + LTLIBCRYPT32="${LTLIBCRYPT32}${LTLIBCRYPT32:+ }-L$additional_libdir" + fi + fi + fi + fi + ;; + -R*) + dir=`echo "X$dep" | sed -e 's/^X-R//'` + if test "$enable_rpath" != no; then + haveit= + for x in $rpathdirs; do + if test "X$x" = "X$dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + rpathdirs="$rpathdirs $dir" + fi + haveit= + for x in $ltrpathdirs; do + if test "X$x" = "X$dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + ltrpathdirs="$ltrpathdirs $dir" + fi + fi + ;; + -l*) + names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` + ;; + *.la) + 
names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` + ;; + *) + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }$dep" + LTLIBCRYPT32="${LTLIBCRYPT32}${LTLIBCRYPT32:+ }$dep" + ;; + esac + done + fi + else + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }-l$name" + LTLIBCRYPT32="${LTLIBCRYPT32}${LTLIBCRYPT32:+ }-l$name" + fi + fi + fi + done + done + if test "X$rpathdirs" != "X"; then + if test -n "$acl_hardcode_libdir_separator"; then + alldirs= + for found_dir in $rpathdirs; do + alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" + done + acl_save_libdir="$libdir" + libdir="$alldirs" + eval flag=\"$acl_hardcode_libdir_flag_spec\" + libdir="$acl_save_libdir" + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }$flag" + else + for found_dir in $rpathdirs; do + acl_save_libdir="$libdir" + libdir="$found_dir" + eval flag=\"$acl_hardcode_libdir_flag_spec\" + libdir="$acl_save_libdir" + LIBCRYPT32="${LIBCRYPT32}${LIBCRYPT32:+ }$flag" + done + fi + fi + if test "X$ltrpathdirs" != "X"; then + for found_dir in $ltrpathdirs; do + LTLIBCRYPT32="${LTLIBCRYPT32}${LTLIBCRYPT32:+ }-R$found_dir" + done + fi + + + ac_save_CPPFLAGS="$CPPFLAGS" + + for element in $INCCRYPT32; do + haveit= + for x in $CPPFLAGS; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X$element"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" + fi + done + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libcrypt32" >&5 +$as_echo_n "checking for libcrypt32... " >&6; } +if ${ac_cv_libcrypt32+:} false; then : + $as_echo_n "(cached) " >&6 +else + + ac_save_LIBS="$LIBS" + LIBS="$LIBS $LIBCRYPT32" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + + #include + #include + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_libcrypt32=yes +else + ac_cv_libcrypt32=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LIBS="$ac_save_LIBS" + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_libcrypt32" >&5 +$as_echo "$ac_cv_libcrypt32" >&6; } + if test "$ac_cv_libcrypt32" = yes; then + HAVE_LIBCRYPT32=yes + +$as_echo "#define HAVE_LIBCRYPT32 1" >>confdefs.h + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libcrypt32" >&5 +$as_echo_n "checking how to link with libcrypt32... " >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBCRYPT32" >&5 +$as_echo "$LIBCRYPT32" >&6; } + else + HAVE_LIBCRYPT32=no + CPPFLAGS="$ac_save_CPPFLAGS" + LIBCRYPT32= + LTLIBCRYPT32= + LIBCRYPT32_PREFIX= + fi + + + + + + + + for ac_header in ntdef.h ntstatus.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" " + #include + +" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + + ac_fn_c_check_decl "$LINENO" "SecureZeroMemory" "ac_cv_have_decl_SecureZeroMemory" " + #include + +" +if test "x$ac_cv_have_decl_SecureZeroMemory" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_SECUREZEROMEMORY $ac_have_decl +_ACEOF + +fi +if test "$ac_cv_libbcrypt" = "yes"; then + +$as_echo "#define LIBSSH2_WINCNG 1" >>confdefs.h + + LIBSREQUIRED= # wincng doesn't provide a .pc file. sad face. 
+ LIBS="$LIBS -lbcrypt" + if test "$ac_cv_libcrypt32" = "yes"; then + LIBS="$LIBS -lcrypt32" + fi + found_crypto="Windows Cryptography API: Next Generation" + if test "$ac_cv_have_decl_SecureZeroMemory" = "yes"; then + support_clear_memory=yes + fi +fi + if test "$ac_cv_libbcrypt" = "yes"; then + WINCNG_TRUE= + WINCNG_FALSE='#' +else + WINCNG_TRUE='#' + WINCNG_FALSE= +fi + + + if false; then + OS400QC3_TRUE= + OS400QC3_FALSE='#' +else + OS400QC3_TRUE='#' + OS400QC3_FALSE= +fi + + +# Check if crypto library was found +if test "$found_crypto" = "none"; then + as_fn_error $? "No crypto library found! +Try --with-libssl-prefix=PATH + or --with-libgcrypt-prefix=PATH + or --with-wincng on Windows\ +" "$LINENO" 5 +fi + +# Look for Libz +if test "$use_libz" != "no"; then + + + + + + + + + + + use_additional=yes + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + + eval additional_includedir=\"$includedir\" + eval additional_libdir=\"$libdir\" + + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + +# Check whether --with-libz-prefix was given. 
+if test "${with_libz_prefix+set}" = set; then : + withval=$with_libz_prefix; + if test "X$withval" = "Xno"; then + use_additional=no + else + if test "X$withval" = "X"; then + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + + eval additional_includedir=\"$includedir\" + eval additional_libdir=\"$libdir\" + + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + else + additional_includedir="$withval/include" + additional_libdir="$withval/$acl_libdirstem" + fi + fi + +fi + + LIBZ= + LTLIBZ= + INCZ= + LIBZ_PREFIX= + rpathdirs= + ltrpathdirs= + names_already_handled= + names_next_round='z ' + while test -n "$names_next_round"; do + names_this_round="$names_next_round" + names_next_round= + for name in $names_this_round; do + already_handled= + for n in $names_already_handled; do + if test "$n" = "$name"; then + already_handled=yes + break + fi + done + if test -z "$already_handled"; then + names_already_handled="$names_already_handled $name" + uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` + eval value=\"\$HAVE_LIB$uppername\" + if test -n "$value"; then + if test "$value" = yes; then + eval value=\"\$LIB$uppername\" + test -z "$value" || LIBZ="${LIBZ}${LIBZ:+ }$value" + eval value=\"\$LTLIB$uppername\" + test -z "$value" || LTLIBZ="${LTLIBZ}${LTLIBZ:+ }$value" + else + : + fi + else + found_dir= + found_la= + found_so= + found_a= + eval libname=\"$acl_libname_spec\" # typically: libname=lib$name + if test -n "$acl_shlibext"; then + shrext=".$acl_shlibext" # typically: shrext=.so + else + shrext= + fi + if test $use_additional = yes; then + dir="$additional_libdir" + if test -n "$acl_shlibext"; then + if test -f "$dir/$libname$shrext"; then + found_dir="$dir" + found_so="$dir/$libname$shrext" + else + if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then + ver=`(cd "$dir" && \ + for f in 
"$libname$shrext".*; do echo "$f"; done \ + | sed -e "s,^$libname$shrext\\\\.,," \ + | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ + | sed 1q ) 2>/dev/null` + if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then + found_dir="$dir" + found_so="$dir/$libname$shrext.$ver" + fi + else + eval library_names=\"$acl_library_names_spec\" + for f in $library_names; do + if test -f "$dir/$f"; then + found_dir="$dir" + found_so="$dir/$f" + break + fi + done + fi + fi + fi + if test "X$found_dir" = "X"; then + if test -f "$dir/$libname.$acl_libext"; then + found_dir="$dir" + found_a="$dir/$libname.$acl_libext" + fi + fi + if test "X$found_dir" != "X"; then + if test -f "$dir/$libname.la"; then + found_la="$dir/$libname.la" + fi + fi + fi + if test "X$found_dir" = "X"; then + for x in $LDFLAGS $LTLIBZ; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + case "$x" in + -L*) + dir=`echo "X$x" | sed -e 's/^X-L//'` + if test -n "$acl_shlibext"; then + if test -f "$dir/$libname$shrext"; then + found_dir="$dir" + found_so="$dir/$libname$shrext" + else + if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then + ver=`(cd "$dir" && \ + for f in "$libname$shrext".*; do echo "$f"; done \ + | sed -e "s,^$libname$shrext\\\\.,," \ + | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ + | sed 1q ) 2>/dev/null` + if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then + found_dir="$dir" + found_so="$dir/$libname$shrext.$ver" + fi + else + eval library_names=\"$acl_library_names_spec\" + for f in $library_names; do + if test -f "$dir/$f"; then + found_dir="$dir" + found_so="$dir/$f" + break + fi + done + fi + fi + fi + if test "X$found_dir" = "X"; then + if test -f "$dir/$libname.$acl_libext"; then + found_dir="$dir" + found_a="$dir/$libname.$acl_libext" + fi + fi + if test "X$found_dir" != "X"; then + if test -f "$dir/$libname.la"; then + found_la="$dir/$libname.la" + fi + fi + ;; + esac + if test "X$found_dir" != "X"; then + break + fi + done + fi + if test "X$found_dir" != "X"; then + LTLIBZ="${LTLIBZ}${LTLIBZ:+ }-L$found_dir -l$name" + if test "X$found_so" != "X"; then + if test "$enable_rpath" = no || test "X$found_dir" = "X/usr/$acl_libdirstem"; then + LIBZ="${LIBZ}${LIBZ:+ }$found_so" + else + haveit= + for x in $ltrpathdirs; do + if test "X$x" = "X$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + ltrpathdirs="$ltrpathdirs $found_dir" + fi + if test "$acl_hardcode_direct" = yes; then + LIBZ="${LIBZ}${LIBZ:+ }$found_so" + else + if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then + LIBZ="${LIBZ}${LIBZ:+ }$found_so" + haveit= + for x in $rpathdirs; do + if test "X$x" = "X$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + rpathdirs="$rpathdirs $found_dir" + fi + else + haveit= + for x in $LDFLAGS $LIBZ; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$found_dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + LIBZ="${LIBZ}${LIBZ:+ }-L$found_dir" + fi + if test "$acl_hardcode_minus_L" != 
no; then + LIBZ="${LIBZ}${LIBZ:+ }$found_so" + else + LIBZ="${LIBZ}${LIBZ:+ }-l$name" + fi + fi + fi + fi + else + if test "X$found_a" != "X"; then + LIBZ="${LIBZ}${LIBZ:+ }$found_a" + else + LIBZ="${LIBZ}${LIBZ:+ }-L$found_dir -l$name" + fi + fi + additional_includedir= + case "$found_dir" in + */$acl_libdirstem | */$acl_libdirstem/) + basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` + LIBZ_PREFIX="$basedir" + additional_includedir="$basedir/include" + ;; + esac + if test "X$additional_includedir" != "X"; then + if test "X$additional_includedir" != "X/usr/include"; then + haveit= + if test "X$additional_includedir" = "X/usr/local/include"; then + if test -n "$GCC"; then + case $host_os in + linux* | gnu* | k*bsd*-gnu) haveit=yes;; + esac + fi + fi + if test -z "$haveit"; then + for x in $CPPFLAGS $INCZ; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-I$additional_includedir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_includedir"; then + INCZ="${INCZ}${INCZ:+ }-I$additional_includedir" + fi + fi + fi + fi + fi + if test -n "$found_la"; then + save_libdir="$libdir" + case "$found_la" in + */* | *\\*) . "$found_la" ;; + *) . 
"./$found_la" ;; + esac + libdir="$save_libdir" + for dep in $dependency_libs; do + case "$dep" in + -L*) + additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` + if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then + haveit= + if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then + if test -n "$GCC"; then + case $host_os in + linux* | gnu* | k*bsd*-gnu) haveit=yes;; + esac + fi + fi + if test -z "$haveit"; then + haveit= + for x in $LDFLAGS $LIBZ; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$additional_libdir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_libdir"; then + LIBZ="${LIBZ}${LIBZ:+ }-L$additional_libdir" + fi + fi + haveit= + for x in $LDFLAGS $LTLIBZ; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X-L$additional_libdir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + if test -d "$additional_libdir"; then + LTLIBZ="${LTLIBZ}${LTLIBZ:+ }-L$additional_libdir" + fi + fi + fi + fi + ;; + -R*) + dir=`echo "X$dep" | sed -e 's/^X-R//'` + if test "$enable_rpath" != no; then + haveit= + for x in $rpathdirs; do + if test "X$x" = "X$dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + rpathdirs="$rpathdirs $dir" + fi + haveit= + for x in $ltrpathdirs; do + if test "X$x" = "X$dir"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + ltrpathdirs="$ltrpathdirs $dir" + fi + fi + ;; + -l*) + names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` + ;; + *.la) + names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 
's,^lib,,' -e 's,\.la$,,'` + ;; + *) + LIBZ="${LIBZ}${LIBZ:+ }$dep" + LTLIBZ="${LTLIBZ}${LTLIBZ:+ }$dep" + ;; + esac + done + fi + else + LIBZ="${LIBZ}${LIBZ:+ }-l$name" + LTLIBZ="${LTLIBZ}${LTLIBZ:+ }-l$name" + fi + fi + fi + done + done + if test "X$rpathdirs" != "X"; then + if test -n "$acl_hardcode_libdir_separator"; then + alldirs= + for found_dir in $rpathdirs; do + alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" + done + acl_save_libdir="$libdir" + libdir="$alldirs" + eval flag=\"$acl_hardcode_libdir_flag_spec\" + libdir="$acl_save_libdir" + LIBZ="${LIBZ}${LIBZ:+ }$flag" + else + for found_dir in $rpathdirs; do + acl_save_libdir="$libdir" + libdir="$found_dir" + eval flag=\"$acl_hardcode_libdir_flag_spec\" + libdir="$acl_save_libdir" + LIBZ="${LIBZ}${LIBZ:+ }$flag" + done + fi + fi + if test "X$ltrpathdirs" != "X"; then + for found_dir in $ltrpathdirs; do + LTLIBZ="${LTLIBZ}${LTLIBZ:+ }-R$found_dir" + done + fi + + + ac_save_CPPFLAGS="$CPPFLAGS" + + for element in $INCZ; do + haveit= + for x in $CPPFLAGS; do + + acl_save_prefix="$prefix" + prefix="$acl_final_prefix" + acl_save_exec_prefix="$exec_prefix" + exec_prefix="$acl_final_exec_prefix" + eval x=\"$x\" + exec_prefix="$acl_save_exec_prefix" + prefix="$acl_save_prefix" + + if test "X$x" = "X$element"; then + haveit=yes + break + fi + done + if test -z "$haveit"; then + CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" + fi + done + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libz" >&5 +$as_echo_n "checking for libz... " >&6; } +if ${ac_cv_libz+:} false; then : + $as_echo_n "(cached) " >&6 +else + + ac_save_LIBS="$LIBS" + LIBS="$LIBS $LIBZ" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +#include +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_libz=yes +else + ac_cv_libz=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LIBS="$ac_save_LIBS" + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_libz" >&5 +$as_echo "$ac_cv_libz" >&6; } + if test "$ac_cv_libz" = yes; then + HAVE_LIBZ=yes + +$as_echo "#define HAVE_LIBZ 1" >>confdefs.h + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libz" >&5 +$as_echo_n "checking how to link with libz... " >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBZ" >&5 +$as_echo "$LIBZ" >&6; } + else + HAVE_LIBZ=no + CPPFLAGS="$ac_save_CPPFLAGS" + LIBZ= + LTLIBZ= + LIBZ_PREFIX= + fi + + + + + + + + if test "$ac_cv_libz" != yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: Cannot find zlib, disabling compression" >&5 +$as_echo "$as_me: Cannot find zlib, disabling compression" >&6;} + { $as_echo "$as_me:${as_lineno-$LINENO}: Try --with-libz-prefix=PATH if you know you have it" >&5 +$as_echo "$as_me: Try --with-libz-prefix=PATH if you know you have it" >&6;} + else + +$as_echo "#define LIBSSH2_HAVE_ZLIB 1" >>confdefs.h + + if test "${LIBSREQUIRED}" != ""; then + LIBSREQUIRED="${LIBSREQUIRED}," + fi + LIBSREQUIRED="${LIBSREQUIRED}zlib" + fi +fi + + + +# +# Optional Settings +# +# Check whether --enable-crypt-none was given. +if test "${enable_crypt_none+set}" = set; then : + enableval=$enable_crypt_none; +$as_echo "#define LIBSSH2_CRYPT_NONE 1" >>confdefs.h + +fi + + +# Check whether --enable-mac-none was given. +if test "${enable_mac_none+set}" = set; then : + enableval=$enable_mac_none; +$as_echo "#define LIBSSH2_MAC_NONE 1" >>confdefs.h + +fi + + +# Check whether --enable-gex-new was given. 
+if test "${enable_gex_new+set}" = set; then : + enableval=$enable_gex_new; GEX_NEW=$enableval +fi + +if test "$GEX_NEW" != "no"; then + +$as_echo "#define LIBSSH2_DH_GEX_NEW 1" >>confdefs.h + +fi + +# Check whether --enable-clear-memory was given. +if test "${enable_clear_memory+set}" = set; then : + enableval=$enable_clear_memory; CLEAR_MEMORY=$enableval +fi + +if test "$CLEAR_MEMORY" != "no"; then + if test "$support_clear_memory" = "yes"; then + +$as_echo "#define LIBSSH2_CLEAR_MEMORY 1" >>confdefs.h + + enable_clear_memory=yes + else + if test "$CLEAR_MEMORY" = "yes"; then + as_fn_error $? "secure clearing/zeroing of memory is not supported by the selected crypto backend" "$LINENO" 5 + else + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: secure clearing/zeroing of memory is not supported by the selected crypto backend" >&5 +$as_echo "$as_me: WARNING: secure clearing/zeroing of memory is not supported by the selected crypto backend" >&2;} + fi + enable_clear_memory=unsupported + fi +else + if test "$support_clear_memory" = "yes"; then + enable_clear_memory=no + else + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: secure clearing/zeroing of memory is not supported by the selected crypto backend" >&5 +$as_echo "$as_me: WARNING: secure clearing/zeroing of memory is not supported by the selected crypto backend" >&2;} + enable_clear_memory=unsupported + fi +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to enable pedantic and debug compiler options" >&5 +$as_echo_n "checking whether to enable pedantic and debug compiler options... " >&6; } +# Check whether --enable-debug was given. 
+if test "${enable_debug+set}" = set; then : + enableval=$enable_debug; case "$enable_debug" in + no) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + ;; + *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + enable_debug=yes + CPPFLAGS="$CPPFLAGS -DLIBSSH2DEBUG" + CFLAGS="$CFLAGS -g" + + + if test "z$ICC" = "z"; then + + ICC="no" + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for icc in use" >&5 +$as_echo_n "checking for icc in use... " >&6; } + if test "$GCC" = "yes"; then + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +__INTEL_COMPILER +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "^__INTEL_COMPILER" >/dev/null 2>&1; then : + ICC="no" +else + ICC="yes" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + + +fi +rm -f conftest* + + fi + if test "$ICC" = "no"; then + # this is not ICC + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + fi + + fi + + if test "$GCC" = "yes"; then + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking gcc version" >&5 +$as_echo_n "checking gcc version... " >&6; } + gccver=`$CC -dumpversion` + num1=`echo $gccver | cut -d . -f1` + num2=`echo $gccver | cut -d . 
-f2` + gccnum=`(expr $num1 "*" 100 + $num2) 2>/dev/null` + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gccver" >&5 +$as_echo "$gccver" >&6; } + + if test "$ICC" = "yes"; then + + + WARN="-wd279,269,981,1418,1419" + + if test "$gccnum" -gt "600"; then + WARN="-Wall $WARN" + fi + else WARN="-W -Wall -Wwrite-strings -pedantic -Wpointer-arith -Wnested-externs -Winline -Wmissing-prototypes" + + + if test "$gccnum" -ge "207"; then + WARN="$WARN -Wmissing-declarations" + fi + + if test "$gccnum" -gt "295"; then + WARN="$WARN -Wundef -Wno-long-long -Wsign-compare" + fi + + if test "$gccnum" -ge "296"; then + WARN="$WARN -Wfloat-equal" + fi + + if test "$gccnum" -gt "296"; then + WARN="$WARN -Wno-format-nonliteral" + fi + + + if test "$gccnum" -ge "303"; then + WARN="$WARN -Wendif-labels -Wstrict-prototypes" + fi + + if test "$gccnum" -ge "304"; then + # try these on gcc 3.4 + WARN="$WARN -Wdeclaration-after-statement" + fi + + for flag in $CPPFLAGS; do + case "$flag" in + -I*) + add=`echo $flag | sed 's/^-I/-isystem /g'` + WARN="$WARN $add" + ;; + esac + done + + fi + CFLAGS="$CFLAGS $WARN" + + { $as_echo "$as_me:${as_lineno-$LINENO}: Added this set of compiler options: $WARN" >&5 +$as_echo "$as_me: Added this set of compiler options: $WARN" >&6;} + + else + { $as_echo "$as_me:${as_lineno-$LINENO}: Added no extra compiler options" >&5 +$as_echo "$as_me: Added no extra compiler options" >&6;} + + fi + NEWFLAGS="" + for flag in $CFLAGS; do + case "$flag" in + -O*) + ;; + *) + NEWFLAGS="$NEWFLAGS $flag" + ;; + esac + done + CFLAGS=$NEWFLAGS + + + ;; + esac + +else + enable_debug=no + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to enable hidden symbols in the library" >&5 +$as_echo_n "checking whether to enable hidden symbols in the library... " >&6; } +# Check whether --enable-hidden-symbols was given. 
+if test "${enable_hidden_symbols+set}" = set; then : + enableval=$enable_hidden_symbols; case "$enableval" in + no) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + ;; + *) + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC supports it" >&5 +$as_echo_n "checking whether $CC supports it... " >&6; } + if test "$GCC" = yes ; then + if $CC --help --verbose 2>&1 | grep fvisibility= > /dev/null ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + +$as_echo "#define LIBSSH2_API __attribute__ ((visibility (\"default\")))" >>confdefs.h + + CFLAGS="$CFLAGS -fvisibility=hidden" + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + fi + + else + if $CC 2>&1 | grep flags >/dev/null && $CC -flags | grep xldscope= >/dev/null ; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + +$as_echo "#define LIBSSH2_API __global" >>confdefs.h + + CFLAGS="$CFLAGS -xldscope=hidden" + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + fi + fi + ;; + esac +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +fi + + +# Build example applications? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build example applications" >&5 +$as_echo_n "checking whether to build example applications... " >&6; } +# Check whether --enable-examples-build was given. 
+if test "${enable_examples_build+set}" = set; then : + enableval=$enable_examples_build; case "$enableval" in + no | false) + build_examples='no' + ;; + *) + build_examples='yes' + ;; +esac +else + build_examples='yes' +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $build_examples" >&5 +$as_echo "$build_examples" >&6; } + if test "x$build_examples" != "xno"; then + BUILD_EXAMPLES_TRUE= + BUILD_EXAMPLES_FALSE='#' +else + BUILD_EXAMPLES_TRUE='#' + BUILD_EXAMPLES_FALSE= +fi + + +# Checks for header files. +# AC_HEADER_STDC +for ac_header in errno.h fcntl.h stdio.h stdlib.h unistd.h sys/uio.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + +for ac_header in sys/select.h sys/socket.h sys/ioctl.h sys/time.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + +for ac_header in arpa/inet.h netinet/in.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + +for ac_header in sys/un.h +do : + ac_fn_c_check_header_mongrel "$LINENO" "sys/un.h" "ac_cv_header_sys_un_h" "$ac_includes_default" +if test "x$ac_cv_header_sys_un_h" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_SYS_UN_H 1 +_ACEOF + have_sys_un_h=yes +else + have_sys_un_h=no +fi + +done + + if test 
"x$have_sys_un_h" = xyes; then + HAVE_SYS_UN_H_TRUE= + HAVE_SYS_UN_H_FALSE='#' +else + HAVE_SYS_UN_H_TRUE='#' + HAVE_SYS_UN_H_FALSE= +fi + + +case $host in + *-*-cygwin* | *-*-cegcc*) + # These are POSIX-like systems using BSD-like sockets API. + ;; + *) + for ac_header in windows.h winsock2.h ws2tcpip.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + + ;; +esac + +case $host in + *darwin*|*interix*) + { $as_echo "$as_me:${as_lineno-$LINENO}: poll use is disabled on this platform" >&5 +$as_echo "$as_me: poll use is disabled on this platform" >&6;} + ;; + *) + for ac_func in poll +do : + ac_fn_c_check_func "$LINENO" "poll" "ac_cv_func_poll" +if test "x$ac_cv_func_poll" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_POLL 1 +_ACEOF + +fi +done + + ;; +esac + +for ac_func in gettimeofday select strtoll +do : + as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` +ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" +if eval test \"x\$"$as_ac_var"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 +_ACEOF + +fi +done + + +if test "$ac_cv_func_select" != "yes"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for select in ws2_32" >&5 +$as_echo_n "checking for select in ws2_32... " >&6; } + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +#ifdef HAVE_WINSOCK2_H +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN +#endif +#include +#endif + +int +main () +{ + + select(0,(fd_set *)NULL,(fd_set *)NULL,(fd_set *)NULL,(struct timeval *)NULL); + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + HAVE_SELECT="1" + +cat >>confdefs.h <<_ACEOF +#define HAVE_SELECT 1 +_ACEOF + + +else + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +fi + +ac_fn_c_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default" +if test "x$ac_cv_type_size_t" = xyes; then : + +else + +cat >>confdefs.h <<_ACEOF +#define size_t unsigned int +_ACEOF + +fi + +# The Ultrix 4.2 mips builtin alloca declared by alloca.h only works +# for constant arguments. Useless! +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for working alloca.h" >&5 +$as_echo_n "checking for working alloca.h... " >&6; } +if ${ac_cv_working_alloca_h+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +int +main () +{ +char *p = (char *) alloca (2 * sizeof (int)); + if (p) return 0; + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_working_alloca_h=yes +else + ac_cv_working_alloca_h=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_working_alloca_h" >&5 +$as_echo "$ac_cv_working_alloca_h" >&6; } +if test $ac_cv_working_alloca_h = yes; then + +$as_echo "#define HAVE_ALLOCA_H 1" >>confdefs.h + +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for alloca" >&5 +$as_echo_n "checking for alloca... 
" >&6; } +if ${ac_cv_func_alloca_works+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifdef __GNUC__ +# define alloca __builtin_alloca +#else +# ifdef _MSC_VER +# include +# define alloca _alloca +# else +# ifdef HAVE_ALLOCA_H +# include +# else +# ifdef _AIX + #pragma alloca +# else +# ifndef alloca /* predefined by HP cc +Olibcalls */ +void *alloca (size_t); +# endif +# endif +# endif +# endif +#endif + +int +main () +{ +char *p = (char *) alloca (1); + if (p) return 0; + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_func_alloca_works=yes +else + ac_cv_func_alloca_works=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_alloca_works" >&5 +$as_echo "$ac_cv_func_alloca_works" >&6; } + +if test $ac_cv_func_alloca_works = yes; then + +$as_echo "#define HAVE_ALLOCA 1" >>confdefs.h + +else + # The SVR3 libPW and SVR4 libucb both contain incompatible functions +# that cause trouble. Some versions do not even contain alloca or +# contain a buggy version. If you still want to use their alloca, +# use ar to extract alloca.o from them instead of compiling alloca.c. + +ALLOCA=\${LIBOBJDIR}alloca.$ac_objext + +$as_echo "#define C_ALLOCA 1" >>confdefs.h + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether \`alloca.c' needs Cray hooks" >&5 +$as_echo_n "checking whether \`alloca.c' needs Cray hooks... " >&6; } +if ${ac_cv_os_cray+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#if defined CRAY && ! 
defined CRAY2 +webecray +#else +wenotbecray +#endif + +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "webecray" >/dev/null 2>&1; then : + ac_cv_os_cray=yes +else + ac_cv_os_cray=no +fi +rm -f conftest* + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_os_cray" >&5 +$as_echo "$ac_cv_os_cray" >&6; } +if test $ac_cv_os_cray = yes; then + for ac_func in _getb67 GETB67 getb67; do + as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` +ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" +if eval test \"x\$"$as_ac_var"\" = x"yes"; then : + +cat >>confdefs.h <<_ACEOF +#define CRAY_STACKSEG_END $ac_func +_ACEOF + + break +fi + + done +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking stack direction for C alloca" >&5 +$as_echo_n "checking stack direction for C alloca... " >&6; } +if ${ac_cv_c_stack_direction+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test "$cross_compiling" = yes; then : + ac_cv_c_stack_direction=0 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$ac_includes_default +int +find_stack_direction (int *addr, int depth) +{ + int dir, dummy = 0; + if (! addr) + addr = &dummy; + *addr = addr < &dummy ? 1 : addr == &dummy ? 0 : -1; + dir = depth ? find_stack_direction (addr, depth - 1) : 0; + return dir + dummy; +} + +int +main (int argc, char **argv) +{ + return find_stack_direction (0, argc + !argv + 20) < 0; +} +_ACEOF +if ac_fn_c_try_run "$LINENO"; then : + ac_cv_c_stack_direction=1 +else + ac_cv_c_stack_direction=-1 +fi +rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ + conftest.$ac_objext conftest.beam conftest.$ac_ext +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_stack_direction" >&5 +$as_echo "$ac_cv_c_stack_direction" >&6; } +cat >>confdefs.h <<_ACEOF +#define STACK_DIRECTION $ac_cv_c_stack_direction +_ACEOF + + +fi + + +# Checks for typedefs, structures, and compiler characteristics. 
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 +$as_echo_n "checking for an ANSI C-conforming const... " >&6; } +if ${ac_cv_c_const+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + +#ifndef __cplusplus + /* Ultrix mips cc rejects this sort of thing. */ + typedef int charset[2]; + const charset cs = { 0, 0 }; + /* SunOS 4.1.1 cc rejects this. */ + char const *const *pcpcc; + char **ppc; + /* NEC SVR4.0.2 mips cc rejects this. */ + struct point {int x, y;}; + static struct point const zero = {0,0}; + /* AIX XL C 1.02.0.0 rejects this. + It does not let you subtract one const X* pointer from another in + an arm of an if-expression whose if-part is not a constant + expression */ + const char *g = "string"; + pcpcc = &g + (g ? g-g : 0); + /* HPUX 7.0 cc rejects these. */ + ++pcpcc; + ppc = (char**) pcpcc; + pcpcc = (char const *const *) ppc; + { /* SCO 3.2v4 cc rejects this sort of thing. */ + char tx; + char *t = &tx; + char const *s = 0 ? (char *) 0 : (char const *) 0; + + *t++ = 0; + if (s) return 0; + } + { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ + int x[] = {25, 17}; + const int *foo = &x[0]; + ++foo; + } + { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ + typedef const int *iptr; + iptr p = 0; + ++p; + } + { /* AIX XL C 1.02.0.0 rejects this sort of thing, saying + "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. 
*/ + struct s { int j; const int *ap[3]; } bx; + struct s *b = &bx; b->j = 5; + } + { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ + const int foo = 10; + if (!foo) return 0; + } + return !cs[0] && !zero.x; +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_c_const=yes +else + ac_cv_c_const=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 +$as_echo "$ac_cv_c_const" >&6; } +if test $ac_cv_c_const = no; then + +$as_echo "#define const /**/" >>confdefs.h + +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for inline" >&5 +$as_echo_n "checking for inline... " >&6; } +if ${ac_cv_c_inline+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_c_inline=no +for ac_kw in inline __inline__ __inline; do + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifndef __cplusplus +typedef int foo_t; +static $ac_kw foo_t static_foo () {return 0; } +$ac_kw foo_t foo () {return 0; } +#endif + +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_c_inline=$ac_kw +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + test "$ac_cv_c_inline" != no && break +done + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_inline" >&5 +$as_echo "$ac_cv_c_inline" >&6; } + +case $ac_cv_c_inline in + inline | yes) ;; + *) + case $ac_cv_c_inline in + no) ac_val=;; + *) ac_val=$ac_cv_c_inline;; + esac + cat >>confdefs.h <<_ACEOF +#ifndef __cplusplus +#define inline $ac_val +#endif +_ACEOF + ;; +esac + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking non-blocking sockets style" >&5 +$as_echo_n "checking non-blocking sockets style... " >&6; } + + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +/* headers for O_NONBLOCK test */ +#include +#include +#include + +int +main () +{ + +/* try to compile O_NONBLOCK */ + +#if defined(sun) || defined(__sun__) || defined(__SUNPRO_C) || defined(__SUNPRO_CC) +# if defined(__SVR4) || defined(__srv4__) +# define PLATFORM_SOLARIS +# else +# define PLATFORM_SUNOS4 +# endif +#endif +#if (defined(_AIX) || defined(__xlC__)) && !defined(_AIX41) +# define PLATFORM_AIX_V3 +#endif + +#if defined(PLATFORM_SUNOS4) || defined(PLATFORM_AIX_V3) || defined(__BEOS__) +#error "O_NONBLOCK does not work on this platform" +#endif + int socket; + int flags = fcntl(socket, F_SETFL, flags | O_NONBLOCK); + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +nonblock="O_NONBLOCK" + +$as_echo "#define HAVE_O_NONBLOCK 1" >>confdefs.h + + +else + + + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* headers for FIONBIO test */ +#include +#include + +int +main () +{ + +/* FIONBIO source test (old-style unix) */ + int socket; + int flags = ioctl(socket, FIONBIO, &flags); + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +nonblock="FIONBIO" + +$as_echo "#define HAVE_FIONBIO 1" >>confdefs.h + + +else + + + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* headers for ioctlsocket test (Windows) */ +#undef inline +#ifdef HAVE_WINDOWS_H +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN +#endif +#include +#ifdef HAVE_WINSOCK2_H +#include +#else +#ifdef HAVE_WINSOCK_H +#include +#endif +#endif +#endif + +int +main () +{ + +/* ioctlsocket source code */ + SOCKET sd; + unsigned long flags = 0; + sd = socket(0, 0, 0); + ioctlsocket(sd, FIONBIO, &flags); + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +nonblock="ioctlsocket" + +$as_echo "#define HAVE_IOCTLSOCKET 1" >>confdefs.h + + +else + + + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* headers for IoctlSocket test (Amiga?) 
*/ +#include + +int +main () +{ + +/* IoctlSocket source code */ + int socket; + int flags = IoctlSocket(socket, FIONBIO, (long)1); + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + +nonblock="IoctlSocket" + +$as_echo "#define HAVE_IOCTLSOCKET_CASE 1" >>confdefs.h + + +else + + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* headers for SO_NONBLOCK test (BeOS) */ +#include + +int +main () +{ + +/* SO_NONBLOCK source code */ + long b = 1; + int socket; + int flags = setsockopt(socket, SOL_SOCKET, SO_NONBLOCK, &b, sizeof(b)); + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +nonblock="SO_NONBLOCK" + +$as_echo "#define HAVE_SO_NONBLOCK 1" >>confdefs.h + + +else + +nonblock="nada" + +$as_echo "#define HAVE_DISABLED_NONBLOCKING 1" >>confdefs.h + + +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + + +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + + +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + + +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + + +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $nonblock" >&5 +$as_echo "$nonblock" >&6; } + + if test "$nonblock" = "nada"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: non-block sockets disabled" >&5 +$as_echo "$as_me: WARNING: non-block sockets disabled" >&2;} + fi + + +ac_config_files="$ac_config_files Makefile src/Makefile tests/Makefile example/Makefile docs/Makefile libssh2.pc" + +cat >confcache <<\_ACEOF +# This file is a shell script that caches the results of configure +# tests run on this system so they can be shared between configure +# scripts and configure runs, see configure's option --config-cache. +# It is not useful on other systems. If it contains results you don't +# want to keep, you may remove or edit it. 
+# +# config.status only pays attention to the cache file if you give it +# the --recheck option to rerun configure. +# +# `ac_cv_env_foo' variables (set or unset) will be overridden when +# loading this file, other *unset* `ac_cv_foo' will be assigned the +# following values. + +_ACEOF + +# The following way of writing the cache mishandles newlines in values, +# but we know of no workaround that is simple, portable, and efficient. +# So, we kill variables containing newlines. +# Ultrix sh set writes to stderr and can't be redirected directly, +# and sets the high bit in the cache file unless we assign to the vars. +( + for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + + (set) 2>&1 | + case $as_nl`(ac_space=' '; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + # `set' does not quote correctly, so add quotes: double-quote + # substitution turns \\\\ into \\, and sed turns \\ into \. + sed -n \ + "s/'/'\\\\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" + ;; #( + *) + # `set' quotes correctly as required by POSIX, so do not add quotes. 
+ sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) | + sed ' + /^ac_cv_env_/b end + t clear + :clear + s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ + t end + s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ + :end' >>confcache +if diff "$cache_file" confcache >/dev/null 2>&1; then :; else + if test -w "$cache_file"; then + if test "x$cache_file" != "x/dev/null"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 +$as_echo "$as_me: updating cache $cache_file" >&6;} + if test ! -f "$cache_file" || test -h "$cache_file"; then + cat confcache >"$cache_file" + else + case $cache_file in #( + */* | ?:*) + mv -f confcache "$cache_file"$$ && + mv -f "$cache_file"$$ "$cache_file" ;; #( + *) + mv -f confcache "$cache_file" ;; + esac + fi + fi + else + { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 +$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} + fi +fi +rm -f confcache + +test "x$prefix" = xNONE && prefix=$ac_default_prefix +# Let make expand exec_prefix. +test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' + +DEFS=-DHAVE_CONFIG_H + +ac_libobjs= +ac_ltlibobjs= +U= +for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue + # 1. Remove the extension, and $U if already installed. + ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' + ac_i=`$as_echo "$ac_i" | sed "$ac_script"` + # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR + # will be set to the directory where LIBOBJS objects are built. + as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" + as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' +done +LIBOBJS=$ac_libobjs + +LTLIBOBJS=$ac_ltlibobjs + + +if test -z "${MAINTAINER_MODE_TRUE}" && test -z "${MAINTAINER_MODE_FALSE}"; then + as_fn_error $? "conditional \"MAINTAINER_MODE\" was never defined. +Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking that generated files are newer than configure" >&5 +$as_echo_n "checking that generated files are newer than configure... " >&6; } + if test -n "$am_sleep_pid"; then + # Hide warnings about reused PIDs. + wait $am_sleep_pid 2>/dev/null + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: done" >&5 +$as_echo "done" >&6; } + if test -n "$EXEEXT"; then + am__EXEEXT_TRUE= + am__EXEEXT_FALSE='#' +else + am__EXEEXT_TRUE='#' + am__EXEEXT_FALSE= +fi + +if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then + as_fn_error $? "conditional \"AMDEP\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then + as_fn_error $? "conditional \"am__fastdepCC\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then + as_fn_error $? "conditional \"am__fastdepCC\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${SSHD_TRUE}" && test -z "${SSHD_FALSE}"; then + as_fn_error $? "conditional \"SSHD\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi + +if test -z "${OPENSSL_TRUE}" && test -z "${OPENSSL_FALSE}"; then + as_fn_error $? "conditional \"OPENSSL\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${LIBGCRYPT_TRUE}" && test -z "${LIBGCRYPT_FALSE}"; then + as_fn_error $? "conditional \"LIBGCRYPT\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${WINCNG_TRUE}" && test -z "${WINCNG_FALSE}"; then + as_fn_error $? "conditional \"WINCNG\" was never defined. +Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 +fi +if test -z "${OS400QC3_TRUE}" && test -z "${OS400QC3_FALSE}"; then + as_fn_error $? "conditional \"OS400QC3\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${BUILD_EXAMPLES_TRUE}" && test -z "${BUILD_EXAMPLES_FALSE}"; then + as_fn_error $? "conditional \"BUILD_EXAMPLES\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${HAVE_SYS_UN_H_TRUE}" && test -z "${HAVE_SYS_UN_H_FALSE}"; then + as_fn_error $? "conditional \"HAVE_SYS_UN_H\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi + +: "${CONFIG_STATUS=./config.status}" +ac_write_fail=0 +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files $CONFIG_STATUS" +{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 +$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} +as_write_fail=0 +cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 +#! $SHELL +# Generated by $as_me. +# Run this file to recreate the current configuration. +# Compiler output produced by configure, useful for debugging +# configure, is in config.log if it exists. + +debug=false +ac_cs_recheck=false +ac_cs_silent=false + +SHELL=\${CONFIG_SHELL-$SHELL} +export SHELL +_ASEOF +cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. 
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. +if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi + +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# IFS +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + $as_echo "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_unset VAR +# --------------- +# Portably unset VAR. 
+as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. 
+as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 
2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p +if mkdir -p . 2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +exec 6>&1 +## ----------------------------------- ## +## Main body of $CONFIG_STATUS script. ## +## ----------------------------------- ## +_ASEOF +test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# Save the log message, to keep $0 and so on meaningful, and to +# report actual input values of CONFIG_FILES etc. instead of their +# values after options handling. +ac_log=" +This file was extended by libssh2 $as_me -, which was +generated by GNU Autoconf 2.69. 
Invocation command line was + + CONFIG_FILES = $CONFIG_FILES + CONFIG_HEADERS = $CONFIG_HEADERS + CONFIG_LINKS = $CONFIG_LINKS + CONFIG_COMMANDS = $CONFIG_COMMANDS + $ $0 $@ + +on `(hostname || uname -n) 2>/dev/null | sed 1q` +" + +_ACEOF + +case $ac_config_files in *" +"*) set x $ac_config_files; shift; ac_config_files=$*;; +esac + +case $ac_config_headers in *" +"*) set x $ac_config_headers; shift; ac_config_headers=$*;; +esac + + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +# Files that config.status was made for. +config_files="$ac_config_files" +config_headers="$ac_config_headers" +config_commands="$ac_config_commands" + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +ac_cs_usage="\ +\`$as_me' instantiates files and other configuration actions +from templates according to the current configuration. Unless the files +and actions are specified as TAGs, all are instantiated by default. + +Usage: $0 [OPTION]... [TAG]... + + -h, --help print this help, then exit + -V, --version print version number and configuration settings, then exit + --config print configuration, then exit + -q, --quiet, --silent + do not print progress messages + -d, --debug don't remove temporary files + --recheck update $as_me by reconfiguring in the same conditions + --file=FILE[:TEMPLATE] + instantiate the configuration file FILE + --header=FILE[:TEMPLATE] + instantiate the configuration header FILE + +Configuration files: +$config_files + +Configuration headers: +$config_headers + +Configuration commands: +$config_commands + +Report bugs to ." + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" +ac_cs_version="\\ +libssh2 config.status - +configured by $0, generated by GNU Autoconf 2.69, + with options \\"\$ac_cs_config\\" + +Copyright (C) 2012 Free Software Foundation, Inc. 
+This config.status script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it." + +ac_pwd='$ac_pwd' +srcdir='$srcdir' +INSTALL='$INSTALL' +MKDIR_P='$MKDIR_P' +AWK='$AWK' +test -n "\$AWK" || AWK=awk +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# The default lists apply if the user does not specify any file. +ac_need_defaults=: +while test $# != 0 +do + case $1 in + --*=?*) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` + ac_shift=: + ;; + --*=) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg= + ac_shift=: + ;; + *) + ac_option=$1 + ac_optarg=$2 + ac_shift=shift + ;; + esac + + case $ac_option in + # Handling of the options. + -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) + ac_cs_recheck=: ;; + --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) + $as_echo "$ac_cs_version"; exit ;; + --config | --confi | --conf | --con | --co | --c ) + $as_echo "$ac_cs_config"; exit ;; + --debug | --debu | --deb | --de | --d | -d ) + debug=: ;; + --file | --fil | --fi | --f ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + '') as_fn_error $? "missing file argument" ;; + esac + as_fn_append CONFIG_FILES " '$ac_optarg'" + ac_need_defaults=false;; + --header | --heade | --head | --hea ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + as_fn_append CONFIG_HEADERS " '$ac_optarg'" + ac_need_defaults=false;; + --he | --h) + # Conflict between --help and --header + as_fn_error $? "ambiguous option: \`$1' +Try \`$0 --help' for more information.";; + --help | --hel | -h ) + $as_echo "$ac_cs_usage"; exit ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil | --si | --s) + ac_cs_silent=: ;; + + # This is an error. + -*) as_fn_error $? 
"unrecognized option: \`$1' +Try \`$0 --help' for more information." ;; + + *) as_fn_append ac_config_targets " $1" + ac_need_defaults=false ;; + + esac + shift +done + +ac_configure_extra_args= + +if $ac_cs_silent; then + exec 6>/dev/null + ac_configure_extra_args="$ac_configure_extra_args --silent" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +if \$ac_cs_recheck; then + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + shift + \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 + CONFIG_SHELL='$SHELL' + export CONFIG_SHELL + exec "\$@" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +exec 5>>config.log +{ + echo + sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX +## Running $as_me. ## +_ASBOX + $as_echo "$ac_log" +} >&5 + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +# +# INIT-COMMANDS +# +AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir" + + +# The HP-UX ksh and POSIX shell print the target directory to stdout +# if CDPATH is set. 
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +sed_quote_subst='$sed_quote_subst' +double_quote_subst='$double_quote_subst' +delay_variable_subst='$delay_variable_subst' +AS='`$ECHO "$AS" | $SED "$delay_single_quote_subst"`' +DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' +OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' +macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`' +macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`' +enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`' +enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`' +pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`' +enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`' +SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`' +ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`' +PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`' +host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`' +host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`' +host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`' +build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`' +build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`' +build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`' +SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`' +Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`' +GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`' +EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`' +FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`' +LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`' +NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`' +LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`' +max_cmd_len='`$ECHO "$max_cmd_len" | $SED "$delay_single_quote_subst"`' +ac_objext='`$ECHO "$ac_objext" | $SED 
"$delay_single_quote_subst"`' +exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' +lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' +lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' +lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' +lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' +lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' +reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' +reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' +deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' +file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' +file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' +want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' +sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' +AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' +AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' +archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' +STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' +RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' +old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' +old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`' +old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`' +lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`' +CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`' +CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`' +compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`' +GCC='`$ECHO "$GCC" | $SED 
"$delay_single_quote_subst"`' +lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`' +lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' +lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' +lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' +nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' +lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' +objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' +MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' +lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' +lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' +lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' +lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' +lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' +need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' +MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' +DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' +NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' +LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' +OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`' +OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`' +libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`' +shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`' +extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED 
"$delay_single_quote_subst"`' +archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`' +enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`' +export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`' +whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`' +compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`' +old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`' +old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`' +archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`' +archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`' +module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`' +module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`' +with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`' +allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`' +no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`' +hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`' +hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`' +hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`' +hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`' +hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`' +hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`' +hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' +inherit_rpath='`$ECHO "$inherit_rpath" 
| $SED "$delay_single_quote_subst"`' +link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' +always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' +export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' +exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' +include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' +prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' +postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' +file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' +variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' +need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' +need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`' +version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`' +runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`' +shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`' +shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`' +libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`' +library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`' +soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`' +install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`' +postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`' +postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED "$delay_single_quote_subst"`' +finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`' +finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`' +hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED 
"$delay_single_quote_subst"`' +sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`' +sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`' +hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`' +enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`' +enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`' +enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`' +old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`' +striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`' + +LTCC='$LTCC' +LTCFLAGS='$LTCFLAGS' +compiler='$compiler_DEFAULT' + +# A function that is used when there is no print builtin or printf. +func_fallback_echo () +{ + eval 'cat <<_LTECHO_EOF +\$1 +_LTECHO_EOF' +} + +# Quote evaled strings. +for var in AS \ +DLLTOOL \ +OBJDUMP \ +SHELL \ +ECHO \ +PATH_SEPARATOR \ +SED \ +GREP \ +EGREP \ +FGREP \ +LD \ +NM \ +LN_S \ +lt_SP2NL \ +lt_NL2SP \ +reload_flag \ +deplibs_check_method \ +file_magic_cmd \ +file_magic_glob \ +want_nocaseglob \ +sharedlib_from_linklib_cmd \ +AR \ +AR_FLAGS \ +archiver_list_spec \ +STRIP \ +RANLIB \ +CC \ +CFLAGS \ +compiler \ +lt_cv_sys_global_symbol_pipe \ +lt_cv_sys_global_symbol_to_cdecl \ +lt_cv_sys_global_symbol_to_c_name_address \ +lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ +nm_file_list_spec \ +lt_prog_compiler_no_builtin_flag \ +lt_prog_compiler_pic \ +lt_prog_compiler_wl \ +lt_prog_compiler_static \ +lt_cv_prog_compiler_c_o \ +need_locks \ +MANIFEST_TOOL \ +DSYMUTIL \ +NMEDIT \ +LIPO \ +OTOOL \ +OTOOL64 \ +shrext_cmds \ +export_dynamic_flag_spec \ +whole_archive_flag_spec \ +compiler_needs_object \ +with_gnu_ld \ +allow_undefined_flag \ +no_undefined_flag \ +hardcode_libdir_flag_spec \ +hardcode_libdir_separator \ +exclude_expsyms \ +include_expsyms \ +file_list_spec \ 
+variables_saved_for_relink \ +libname_spec \ +library_names_spec \ +soname_spec \ +install_override_mode \ +finish_eval \ +old_striplib \ +striplib; do + case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in + *[\\\\\\\`\\"\\\$]*) + eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" + ;; + *) + eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" + ;; + esac +done + +# Double-quote double-evaled strings. +for var in reload_cmds \ +old_postinstall_cmds \ +old_postuninstall_cmds \ +old_archive_cmds \ +extract_expsyms_cmds \ +old_archive_from_new_cmds \ +old_archive_from_expsyms_cmds \ +archive_cmds \ +archive_expsym_cmds \ +module_cmds \ +module_expsym_cmds \ +export_symbols_cmds \ +prelink_cmds \ +postlink_cmds \ +postinstall_cmds \ +postuninstall_cmds \ +finish_cmds \ +sys_lib_search_path_spec \ +sys_lib_dlsearch_path_spec; do + case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in + *[\\\\\\\`\\"\\\$]*) + eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" + ;; + *) + eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" + ;; + esac +done + +ac_aux_dir='$ac_aux_dir' +xsi_shell='$xsi_shell' +lt_shell_append='$lt_shell_append' + +# See if we are running on zsh, and set the options which allow our +# commands through without removal of \ escapes INIT. +if test -n "\${ZSH_VERSION+set}" ; then + setopt NO_GLOB_SUBST +fi + + + PACKAGE='$PACKAGE' + VERSION='$VERSION' + TIMESTAMP='$TIMESTAMP' + RM='$RM' + ofile='$ofile' + + + + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + +# Handling of arguments. 
+for ac_config_target in $ac_config_targets +do + case $ac_config_target in + "src/libssh2_config.h") CONFIG_HEADERS="$CONFIG_HEADERS src/libssh2_config.h" ;; + "example/libssh2_config.h") CONFIG_HEADERS="$CONFIG_HEADERS example/libssh2_config.h" ;; + "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; + "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;; + "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; + "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;; + "tests/Makefile") CONFIG_FILES="$CONFIG_FILES tests/Makefile" ;; + "example/Makefile") CONFIG_FILES="$CONFIG_FILES example/Makefile" ;; + "docs/Makefile") CONFIG_FILES="$CONFIG_FILES docs/Makefile" ;; + "libssh2.pc") CONFIG_FILES="$CONFIG_FILES libssh2.pc" ;; + + *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; + esac +done + + +# If the user did not use the arguments to specify the items to instantiate, +# then the envvar interface is used. Set only those that are not. +# We use the long form for the default assignment because of an extremely +# bizarre bug on SunOS 4.1.3. +if $ac_need_defaults; then + test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files + test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers + test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands +fi + +# Have a temporary directory for convenience. Make it in the build tree +# simply because there is no reason against having it here, and in addition, +# creating and moving files from /tmp can sometimes cause problems. +# Hook for its removal unless debugging. +# Note that there is a small window in which the directory will not be cleaned: +# after its creation but before its name has been assigned to `$tmp'. +$debug || +{ + tmp= ac_tmp= + trap 'exit_status=$? + : "${ac_tmp:=$tmp}" + { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status +' 0 + trap 'as_fn_exit 1' 1 2 13 15 +} +# Create a (secure) tmp directory for tmp files. 
+ +{ + tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && + test -d "$tmp" +} || +{ + tmp=./conf$$-$RANDOM + (umask 077 && mkdir "$tmp") +} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 +ac_tmp=$tmp + +# Set up the scripts for CONFIG_FILES section. +# No need to generate them if there are no CONFIG_FILES. +# This happens for instance with `./config.status config.h'. +if test -n "$CONFIG_FILES"; then + + +ac_cr=`echo X | tr X '\015'` +# On cygwin, bash can eat \r inside `` if the user requested igncr. +# But we know of no other shell where ac_cr would be empty at this +# point, so we can use a bashism as a fallback. +if test "x$ac_cr" = x; then + eval ac_cr=\$\'\\r\' +fi +ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` +if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then + ac_cs_awk_cr='\\r' +else + ac_cs_awk_cr=$ac_cr +fi + +echo 'BEGIN {' >"$ac_tmp/subs1.awk" && +_ACEOF + + +{ + echo "cat >conf$$subs.awk <<_ACEOF" && + echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && + echo "_ACEOF" +} >conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 +ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` +ac_delim='%!_!# ' +for ac_last_try in false false false false false :; do + . ./conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + + ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` + if test $ac_delim_n = $ac_delim_num; then + break + elif $ac_last_try; then + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" + fi +done +rm -f conf$$subs.sh + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && +_ACEOF +sed -n ' +h +s/^/S["/; s/!.*/"]=/ +p +g +s/^[^!]*!// +:repl +t repl +s/'"$ac_delim"'$// +t delim +:nl +h +s/\(.\{148\}\)..*/\1/ +t more1 +s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ +p +n +b repl +:more1 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t nl +:delim +h +s/\(.\{148\}\)..*/\1/ +t more2 +s/["\\]/\\&/g; s/^/"/; s/$/"/ +p +b +:more2 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t delim +' >$CONFIG_STATUS || ac_write_fail=1 +rm -f conf$$subs.awk +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACAWK +cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && + for (key in S) S_is_set[key] = 1 + FS = "" + +} +{ + line = $ 0 + nfields = split(line, field, "@") + substed = 0 + len = length(field[1]) + for (i = 2; i < nfields; i++) { + key = field[i] + keylen = length(key) + if (S_is_set[key]) { + value = S[key] + line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) + len += length(value) + length(field[++i]) + substed = 1 + } else + len += 1 + keylen + } + + print line +} + +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then + sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" +else + cat +fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ + || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 +_ACEOF + +# VPATH may cause trouble with some makes, so we remove sole $(srcdir), +# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and +# trailing colons and then remove the whole line if VPATH becomes empty +# (actually we leave an empty line to preserve line numbers). 
+if test "x$srcdir" = x.; then + ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ +h +s/// +s/^/:/ +s/[ ]*$/:/ +s/:\$(srcdir):/:/g +s/:\${srcdir}:/:/g +s/:@srcdir@:/:/g +s/^:*// +s/:*$// +x +s/\(=[ ]*\).*/\1/ +G +s/\n// +s/^[^=]*=[ ]*$// +}' +fi + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +fi # test -n "$CONFIG_FILES" + +# Set up the scripts for CONFIG_HEADERS section. +# No need to generate them if there are no CONFIG_HEADERS. +# This happens for instance with `./config.status Makefile'. +if test -n "$CONFIG_HEADERS"; then +cat >"$ac_tmp/defines.awk" <<\_ACAWK || +BEGIN { +_ACEOF + +# Transform confdefs.h into an awk script `defines.awk', embedded as +# here-document in config.status, that substitutes the proper values into +# config.h.in to produce config.h. + +# Create a delimiter string that does not exist in confdefs.h, to ease +# handling of long lines. +ac_delim='%!_!# ' +for ac_last_try in false false :; do + ac_tt=`sed -n "/$ac_delim/p" confdefs.h` + if test -z "$ac_tt"; then + break + elif $ac_last_try; then + as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! " + fi +done + +# For the awk script, D is an array of macro values keyed by name, +# likewise P contains macro parameters if any. Preserve backslash +# newline sequences. 
+ +ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* +sed -n ' +s/.\{148\}/&'"$ac_delim"'/g +t rset +:rset +s/^[ ]*#[ ]*define[ ][ ]*/ / +t def +d +:def +s/\\$// +t bsnl +s/["\\]/\\&/g +s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ +D["\1"]=" \3"/p +s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p +d +:bsnl +s/["\\]/\\&/g +s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ +D["\1"]=" \3\\\\\\n"\\/p +t cont +s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p +t cont +d +:cont +n +s/.\{148\}/&'"$ac_delim"'/g +t clear +:clear +s/\\$// +t bsnlc +s/["\\]/\\&/g; s/^/"/; s/$/"/p +d +:bsnlc +s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p +b cont +' >$CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + for (key in D) D_is_set[key] = 1 + FS = "" +} +/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { + line = \$ 0 + split(line, arg, " ") + if (arg[1] == "#") { + defundef = arg[2] + mac1 = arg[3] + } else { + defundef = substr(arg[1], 2) + mac1 = arg[2] + } + split(mac1, mac2, "(") #) + macro = mac2[1] + prefix = substr(line, 1, index(line, defundef) - 1) + if (D_is_set[macro]) { + # Preserve the white space surrounding the "#". + print prefix "define", macro P[macro] D[macro] + next + } else { + # Replace #undef with comments. This is necessary, for example, + # in the case of _POSIX_SOURCE, which is predefined and required + # on some systems where configure will not decide to define it. + if (defundef == "undef") { + print "/*", prefix defundef, macro, "*/" + next + } + } +} +{ print } +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 +fi # test -n "$CONFIG_HEADERS" + + +eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" +shift +for ac_tag +do + case $ac_tag in + :[FHLC]) ac_mode=$ac_tag; continue;; + esac + case $ac_mode$ac_tag in + :[FHL]*:*);; + :L* | :C*:*) as_fn_error $? 
"invalid tag \`$ac_tag'" "$LINENO" 5;; + :[FH]-) ac_tag=-:-;; + :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; + esac + ac_save_IFS=$IFS + IFS=: + set x $ac_tag + IFS=$ac_save_IFS + shift + ac_file=$1 + shift + + case $ac_mode in + :L) ac_source=$1;; + :[FH]) + ac_file_inputs= + for ac_f + do + case $ac_f in + -) ac_f="$ac_tmp/stdin";; + *) # Look for the file first in the build tree, then in the source tree + # (if the path is not absolute). The absolute path cannot be DOS-style, + # because $ac_f cannot contain `:'. + test -f "$ac_f" || + case $ac_f in + [\\/$]*) false;; + *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; + esac || + as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; + esac + case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac + as_fn_append ac_file_inputs " '$ac_f'" + done + + # Let's still pretend it is `configure' which instantiates (i.e., don't + # use $as_me), people would be surprised to read: + # /* config.h. Generated by config.status. */ + configure_input='Generated from '` + $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' + `' by configure.' + if test x"$ac_file" != x-; then + configure_input="$ac_file. $configure_input" + { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 +$as_echo "$as_me: creating $ac_file" >&6;} + fi + # Neutralize special characters interpreted by sed in replacement strings. + case $configure_input in #( + *\&* | *\|* | *\\* ) + ac_sed_conf_input=`$as_echo "$configure_input" | + sed 's/[\\\\&|]/\\\\&/g'`;; #( + *) ac_sed_conf_input=$configure_input;; + esac + + case $ac_tag in + *:-:* | *:-) cat >"$ac_tmp/stdin" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; + esac + ;; + esac + + ac_dir=`$as_dirname -- "$ac_file" || +$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$ac_file" : 'X\(//\)[^/]' \| \ + X"$ac_file" : 'X\(//\)$' \| \ + X"$ac_file" : 'X\(/\)' \| . 
2>/dev/null || +$as_echo X"$ac_file" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + as_dir="$ac_dir"; as_fn_mkdir_p + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + + case $ac_mode in + :F) + # + # CONFIG_FILE + # + + case $INSTALL in + [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; + *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; + esac + ac_MKDIR_P=$MKDIR_P + case $MKDIR_P in + [\\/$]* | ?:[\\/]* ) ;; + */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; + esac +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# If the template does not know about datarootdir, expand it. +# FIXME: This hack should be removed a few years after 2.60. 
+ac_datarootdir_hack=; ac_datarootdir_seen= +ac_sed_dataroot=' +/datarootdir/ { + p + q +} +/@datadir@/p +/@docdir@/p +/@infodir@/p +/@localedir@/p +/@mandir@/p' +case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in +*datarootdir*) ac_datarootdir_seen=yes;; +*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 +$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + ac_datarootdir_hack=' + s&@datadir@&$datadir&g + s&@docdir@&$docdir&g + s&@infodir@&$infodir&g + s&@localedir@&$localedir&g + s&@mandir@&$mandir&g + s&\\\${datarootdir}&$datarootdir&g' ;; +esac +_ACEOF + +# Neutralize VPATH when `$srcdir' = `.'. +# Shell code in configure.ac might set extrasub. +# FIXME: do we really want to maintain this feature? +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_sed_extra="$ac_vpsub +$extrasub +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +:t +/@[a-zA-Z_][a-zA-Z_0-9]*@/!b +s|@configure_input@|$ac_sed_conf_input|;t t +s&@top_builddir@&$ac_top_builddir_sub&;t t +s&@top_build_prefix@&$ac_top_build_prefix&;t t +s&@srcdir@&$ac_srcdir&;t t +s&@abs_srcdir@&$ac_abs_srcdir&;t t +s&@top_srcdir@&$ac_top_srcdir&;t t +s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t +s&@builddir@&$ac_builddir&;t t +s&@abs_builddir@&$ac_abs_builddir&;t t +s&@abs_top_builddir@&$ac_abs_top_builddir&;t t +s&@INSTALL@&$ac_INSTALL&;t t +s&@MKDIR_P@&$ac_MKDIR_P&;t t +$ac_datarootdir_hack +" +eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ + >$ac_tmp/out || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 + +test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && + { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && + { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ + "$ac_tmp/out"`; test -z "$ac_out"; } && + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&5 +$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&2;} + + rm -f "$ac_tmp/stdin" + case $ac_file in + -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; + *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; + esac \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + ;; + :H) + # + # CONFIG_HEADER + # + if test x"$ac_file" != x-; then + { + $as_echo "/* $configure_input */" \ + && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" + } >"$ac_tmp/config.h" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then + { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 +$as_echo "$as_me: $ac_file is unchanged" >&6;} + else + rm -f "$ac_file" + mv "$ac_tmp/config.h" "$ac_file" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + fi + else + $as_echo "/* $configure_input */" \ + && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ + || as_fn_error $? "could not create -" "$LINENO" 5 + fi +# Compute "$ac_file"'s index in $config_headers. 
+_am_arg="$ac_file" +_am_stamp_count=1 +for _am_header in $config_headers :; do + case $_am_header in + $_am_arg | $_am_arg:* ) + break ;; + * ) + _am_stamp_count=`expr $_am_stamp_count + 1` ;; + esac +done +echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || +$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$_am_arg" : 'X\(//\)[^/]' \| \ + X"$_am_arg" : 'X\(//\)$' \| \ + X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$_am_arg" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'`/stamp-h$_am_stamp_count + ;; + + :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 +$as_echo "$as_me: executing $ac_file commands" >&6;} + ;; + esac + + + case $ac_file$ac_mode in + "depfiles":C) test x"$AMDEP_TRUE" != x"" || { + # Older Autoconf quotes --file arguments for eval, but not when files + # are listed without --file. Let's play safe and only enable the eval + # if we detect the quoting. + case $CONFIG_FILES in + *\'*) eval set x "$CONFIG_FILES" ;; + *) set x $CONFIG_FILES ;; + esac + shift + for mf + do + # Strip MF so we end up with the name of the file. + mf=`echo "$mf" | sed -e 's/:.*$//'` + # Check whether this is an Automake generated Makefile or not. + # We used to match only the files named 'Makefile.in', but + # some people rename them; so instead we look at the file content. + # Grep'ing the first line is not enough: some people post-process + # each Makefile.in and add a new line on top of each file to say so. + # Grep'ing the whole file is not good either: AIX grep has a line + # limit of 2048, but all sed's we know have understand at least 4000. 
+ if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then + dirpart=`$as_dirname -- "$mf" || +$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$mf" : 'X\(//\)[^/]' \| \ + X"$mf" : 'X\(//\)$' \| \ + X"$mf" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$mf" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + else + continue + fi + # Extract the definition of DEPDIR, am__include, and am__quote + # from the Makefile without running 'make'. + DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` + test -z "$DEPDIR" && continue + am__include=`sed -n 's/^am__include = //p' < "$mf"` + test -z "$am__include" && continue + am__quote=`sed -n 's/^am__quote = //p' < "$mf"` + # Find all dependency output files, they are included files with + # $(DEPDIR) in their names. We invoke sed twice because it is the + # simplest approach to changing $(DEPDIR) to its actual value in the + # expansion. + for file in `sed -n " + s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ + sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do + # Make sure the directory exists. + test -f "$dirpart/$file" && continue + fdir=`$as_dirname -- "$file" || +$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$file" : 'X\(//\)[^/]' \| \ + X"$file" : 'X\(//\)$' \| \ + X"$file" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$file" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + as_dir=$dirpart/$fdir; as_fn_mkdir_p + # echo "creating $dirpart/$file" + echo '# dummy' > "$dirpart/$file" + done + done +} + ;; + "libtool":C) + + # See if we are running on zsh, and set the options which allow our + # commands through without removal of \ escapes. 
+ if test -n "${ZSH_VERSION+set}" ; then + setopt NO_GLOB_SUBST + fi + + cfgfile="${ofile}T" + trap "$RM \"$cfgfile\"; exit 1" 1 2 15 + $RM "$cfgfile" + + cat <<_LT_EOF >> "$cfgfile" +#! $SHELL + +# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. +# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION +# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: +# NOTE: Changes made to this file will be lost: look at ltmain.sh. +# +# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +# 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. +# Written by Gordon Matzigkeit, 1996 +# +# This file is part of GNU Libtool. +# +# GNU Libtool is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation; either version 2 of +# the License, or (at your option) any later version. +# +# As a special exception to the GNU General Public License, +# if you distribute this file as part of a program or library that +# is built using GNU Libtool, you may include this file under the +# same distribution terms that you use for the rest of that program. +# +# GNU Libtool is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with GNU Libtool; see the file COPYING. If not, a copy +# can be downloaded from http://www.gnu.org/licenses/gpl.html, or +# obtained by writing to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + + +# The names of the tagged configurations supported by this script. +available_tags="" + +# ### BEGIN LIBTOOL CONFIG + +# Assembler program. 
+AS=$lt_AS + +# DLL creation program. +DLLTOOL=$lt_DLLTOOL + +# Object dumper program. +OBJDUMP=$lt_OBJDUMP + +# Which release of libtool.m4 was used? +macro_version=$macro_version +macro_revision=$macro_revision + +# Whether or not to build shared libraries. +build_libtool_libs=$enable_shared + +# Whether or not to build static libraries. +build_old_libs=$enable_static + +# What type of objects to build. +pic_mode=$pic_mode + +# Whether or not to optimize for fast installation. +fast_install=$enable_fast_install + +# Shell to use when invoking shell scripts. +SHELL=$lt_SHELL + +# An echo program that protects backslashes. +ECHO=$lt_ECHO + +# The PATH separator for the build system. +PATH_SEPARATOR=$lt_PATH_SEPARATOR + +# The host system. +host_alias=$host_alias +host=$host +host_os=$host_os + +# The build system. +build_alias=$build_alias +build=$build +build_os=$build_os + +# A sed program that does not truncate output. +SED=$lt_SED + +# Sed that helps us avoid accidentally triggering echo(1) options like -n. +Xsed="\$SED -e 1s/^X//" + +# A grep program that handles long lines. +GREP=$lt_GREP + +# An ERE matcher. +EGREP=$lt_EGREP + +# A literal string matcher. +FGREP=$lt_FGREP + +# A BSD- or MS-compatible name lister. +NM=$lt_NM + +# Whether we need soft or hard links. +LN_S=$lt_LN_S + +# What is the maximum length of a command? +max_cmd_len=$max_cmd_len + +# Object file suffix (normally "o"). +objext=$ac_objext + +# Executable file suffix (normally ""). +exeext=$exeext + +# whether the shell understands "unset". +lt_unset=$lt_unset + +# turn spaces into newlines. +SP2NL=$lt_lt_SP2NL + +# turn newlines into spaces. +NL2SP=$lt_lt_NL2SP + +# convert \$build file names to \$host format. +to_host_file_cmd=$lt_cv_to_host_file_cmd + +# convert \$build files to toolchain format. +to_tool_file_cmd=$lt_cv_to_tool_file_cmd + +# Method to check whether dependent libraries are shared objects. 
+deplibs_check_method=$lt_deplibs_check_method + +# Command to use when deplibs_check_method = "file_magic". +file_magic_cmd=$lt_file_magic_cmd + +# How to find potential files when deplibs_check_method = "file_magic". +file_magic_glob=$lt_file_magic_glob + +# Find potential files using nocaseglob when deplibs_check_method = "file_magic". +want_nocaseglob=$lt_want_nocaseglob + +# Command to associate shared and link libraries. +sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd + +# The archiver. +AR=$lt_AR + +# Flags to create an archive. +AR_FLAGS=$lt_AR_FLAGS + +# How to feed a file listing to the archiver. +archiver_list_spec=$lt_archiver_list_spec + +# A symbol stripping program. +STRIP=$lt_STRIP + +# Commands used to install an old-style archive. +RANLIB=$lt_RANLIB +old_postinstall_cmds=$lt_old_postinstall_cmds +old_postuninstall_cmds=$lt_old_postuninstall_cmds + +# Whether to use a lock for old archive extraction. +lock_old_archive_extraction=$lock_old_archive_extraction + +# A C compiler. +LTCC=$lt_CC + +# LTCC compiler flags. +LTCFLAGS=$lt_CFLAGS + +# Take the output of nm and produce a listing of raw symbols and C names. +global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe + +# Transform the output of nm in a proper C declaration. +global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl + +# Transform the output of nm in a C name address pair. +global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address + +# Transform the output of nm in a C name address pair when lib prefix is needed. +global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix + +# Specify filename containing input files for \$NM. +nm_file_list_spec=$lt_nm_file_list_spec + +# The root where to search for dependent libraries,and in which our libraries should be installed. +lt_sysroot=$lt_sysroot + +# The name of the directory that contains temporary libtool files. 
+objdir=$objdir + +# Used to examine libraries when file_magic_cmd begins with "file". +MAGIC_CMD=$MAGIC_CMD + +# Must we lock files when doing compilation? +need_locks=$lt_need_locks + +# Manifest tool. +MANIFEST_TOOL=$lt_MANIFEST_TOOL + +# Tool to manipulate archived DWARF debug symbol files on Mac OS X. +DSYMUTIL=$lt_DSYMUTIL + +# Tool to change global to local symbols on Mac OS X. +NMEDIT=$lt_NMEDIT + +# Tool to manipulate fat objects and archives on Mac OS X. +LIPO=$lt_LIPO + +# ldd/readelf like tool for Mach-O binaries on Mac OS X. +OTOOL=$lt_OTOOL + +# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4. +OTOOL64=$lt_OTOOL64 + +# Old archive suffix (normally "a"). +libext=$libext + +# Shared library suffix (normally ".so"). +shrext_cmds=$lt_shrext_cmds + +# The commands to extract the exported symbol list from a shared archive. +extract_expsyms_cmds=$lt_extract_expsyms_cmds + +# Variables whose values should be saved in libtool wrapper scripts and +# restored at link time. +variables_saved_for_relink=$lt_variables_saved_for_relink + +# Do we need the "lib" prefix for modules? +need_lib_prefix=$need_lib_prefix + +# Do we need a version for libraries? +need_version=$need_version + +# Library versioning type. +version_type=$version_type + +# Shared library runtime path variable. +runpath_var=$runpath_var + +# Shared library path variable. +shlibpath_var=$shlibpath_var + +# Is shlibpath searched before the hard-coded library search path? +shlibpath_overrides_runpath=$shlibpath_overrides_runpath + +# Format of library name prefix. +libname_spec=$lt_libname_spec + +# List of archive names. First name is the real one, the rest are links. +# The last name is the one that the linker finds with -lNAME +library_names_spec=$lt_library_names_spec + +# The coded name of the library, if different from the real name. +soname_spec=$lt_soname_spec + +# Permission mode override for installation of shared libraries. 
+install_override_mode=$lt_install_override_mode + +# Command to use after installation of a shared archive. +postinstall_cmds=$lt_postinstall_cmds + +# Command to use after uninstallation of a shared archive. +postuninstall_cmds=$lt_postuninstall_cmds + +# Commands used to finish a libtool library installation in a directory. +finish_cmds=$lt_finish_cmds + +# As "finish_cmds", except a single script fragment to be evaled but +# not shown. +finish_eval=$lt_finish_eval + +# Whether we should hardcode library paths into libraries. +hardcode_into_libs=$hardcode_into_libs + +# Compile-time system search path for libraries. +sys_lib_search_path_spec=$lt_sys_lib_search_path_spec + +# Run-time system search path for libraries. +sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec + +# Whether dlopen is supported. +dlopen_support=$enable_dlopen + +# Whether dlopen of programs is supported. +dlopen_self=$enable_dlopen_self + +# Whether dlopen of statically linked programs is supported. +dlopen_self_static=$enable_dlopen_self_static + +# Commands to strip libraries. +old_striplib=$lt_old_striplib +striplib=$lt_striplib + + +# The linker used to build libraries. +LD=$lt_LD + +# How to create reloadable object files. +reload_flag=$lt_reload_flag +reload_cmds=$lt_reload_cmds + +# Commands used to build an old-style archive. +old_archive_cmds=$lt_old_archive_cmds + +# A language specific compiler. +CC=$lt_compiler + +# Is the compiler the GNU compiler? +with_gcc=$GCC + +# Compiler flag to turn off builtin functions. +no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag + +# Additional compiler flags for building library objects. +pic_flag=$lt_lt_prog_compiler_pic + +# How to pass a linker flag through the compiler. +wl=$lt_lt_prog_compiler_wl + +# Compiler flag to prevent dynamic linking. +link_static_flag=$lt_lt_prog_compiler_static + +# Does compiler simultaneously support -c and -o options? 
+compiler_c_o=$lt_lt_cv_prog_compiler_c_o + +# Whether or not to add -lc for building shared libraries. +build_libtool_need_lc=$archive_cmds_need_lc + +# Whether or not to disallow shared libs when runtime libs are static. +allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes + +# Compiler flag to allow reflexive dlopens. +export_dynamic_flag_spec=$lt_export_dynamic_flag_spec + +# Compiler flag to generate shared objects directly from archives. +whole_archive_flag_spec=$lt_whole_archive_flag_spec + +# Whether the compiler copes with passing no objects directly. +compiler_needs_object=$lt_compiler_needs_object + +# Create an old-style archive from a shared archive. +old_archive_from_new_cmds=$lt_old_archive_from_new_cmds + +# Create a temporary old-style archive to link instead of a shared archive. +old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds + +# Commands used to build a shared archive. +archive_cmds=$lt_archive_cmds +archive_expsym_cmds=$lt_archive_expsym_cmds + +# Commands used to build a loadable module if different from building +# a shared archive. +module_cmds=$lt_module_cmds +module_expsym_cmds=$lt_module_expsym_cmds + +# Whether we are building with GNU ld or not. +with_gnu_ld=$lt_with_gnu_ld + +# Flag that allows shared libraries with undefined symbols to be built. +allow_undefined_flag=$lt_allow_undefined_flag + +# Flag that enforces no undefined symbols. +no_undefined_flag=$lt_no_undefined_flag + +# Flag to hardcode \$libdir into a binary during linking. +# This must work even if \$libdir does not exist +hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec + +# Whether we need a single "-rpath" flag with a separated argument. +hardcode_libdir_separator=$lt_hardcode_libdir_separator + +# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes +# DIR into the resulting binary. 
+hardcode_direct=$hardcode_direct + +# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes +# DIR into the resulting binary and the resulting library dependency is +# "absolute",i.e impossible to change by setting \${shlibpath_var} if the +# library is relocated. +hardcode_direct_absolute=$hardcode_direct_absolute + +# Set to "yes" if using the -LDIR flag during linking hardcodes DIR +# into the resulting binary. +hardcode_minus_L=$hardcode_minus_L + +# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR +# into the resulting binary. +hardcode_shlibpath_var=$hardcode_shlibpath_var + +# Set to "yes" if building a shared library automatically hardcodes DIR +# into the library and all subsequent libraries and executables linked +# against it. +hardcode_automatic=$hardcode_automatic + +# Set to yes if linker adds runtime paths of dependent libraries +# to runtime path list. +inherit_rpath=$inherit_rpath + +# Whether libtool must link a program against all its dependency libraries. +link_all_deplibs=$link_all_deplibs + +# Set to "yes" if exported symbols are required. +always_export_symbols=$always_export_symbols + +# The commands to list exported symbols. +export_symbols_cmds=$lt_export_symbols_cmds + +# Symbols that should not be listed in the preloaded symbols. +exclude_expsyms=$lt_exclude_expsyms + +# Symbols that must always be exported. +include_expsyms=$lt_include_expsyms + +# Commands necessary for linking programs (against libraries) with templates. +prelink_cmds=$lt_prelink_cmds + +# Commands necessary for finishing linking programs. +postlink_cmds=$lt_postlink_cmds + +# Specify filename containing input files. +file_list_spec=$lt_file_list_spec + +# How to hardcode a shared library path into an executable. +hardcode_action=$hardcode_action + +# ### END LIBTOOL CONFIG + +_LT_EOF + + case $host_os in + aix3*) + cat <<\_LT_EOF >> "$cfgfile" +# AIX sometimes has problems with the GCC collect2 program. 
For some +# reason, if we set the COLLECT_NAMES environment variable, the problems +# vanish in a puff of smoke. +if test "X${COLLECT_NAMES+set}" != Xset; then + COLLECT_NAMES= + export COLLECT_NAMES +fi +_LT_EOF + ;; + esac + + +ltmain="$ac_aux_dir/ltmain.sh" + + + # We use sed instead of cat because bash on DJGPP gets confused if + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? + sed '$q' "$ltmain" >> "$cfgfile" \ + || (rm -f "$cfgfile"; exit 1) + + if test x"$xsi_shell" = xyes; then + sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ +func_dirname ()\ +{\ +\ case ${1} in\ +\ */*) func_dirname_result="${1%/*}${2}" ;;\ +\ * ) func_dirname_result="${3}" ;;\ +\ esac\ +} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_basename ()$/,/^} # func_basename /c\ +func_basename ()\ +{\ +\ func_basename_result="${1##*/}"\ +} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ +func_dirname_and_basename ()\ +{\ +\ case ${1} in\ +\ */*) func_dirname_result="${1%/*}${2}" ;;\ +\ * ) func_dirname_result="${3}" ;;\ +\ esac\ +\ func_basename_result="${1##*/}"\ +} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? 
|| _lt_function_replace_fail=: + + + sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ +func_stripname ()\ +{\ +\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ +\ # positional parameters, so assign one to ordinary parameter first.\ +\ func_stripname_result=${3}\ +\ func_stripname_result=${func_stripname_result#"${1}"}\ +\ func_stripname_result=${func_stripname_result%"${2}"}\ +} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ +func_split_long_opt ()\ +{\ +\ func_split_long_opt_name=${1%%=*}\ +\ func_split_long_opt_arg=${1#*=}\ +} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ +func_split_short_opt ()\ +{\ +\ func_split_short_opt_arg=${1#??}\ +\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ +} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ +func_lo2o ()\ +{\ +\ case ${1} in\ +\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ +\ *) func_lo2o_result=${1} ;;\ +\ esac\ +} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? 
|| _lt_function_replace_fail=: + + + sed -e '/^func_xform ()$/,/^} # func_xform /c\ +func_xform ()\ +{\ + func_xform_result=${1%.*}.lo\ +} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_arith ()$/,/^} # func_arith /c\ +func_arith ()\ +{\ + func_arith_result=$(( $* ))\ +} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_len ()$/,/^} # func_len /c\ +func_len ()\ +{\ + func_len_result=${#1}\ +} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + +fi + +if test x"$lt_shell_append" = xyes; then + sed -e '/^func_append ()$/,/^} # func_append /c\ +func_append ()\ +{\ + eval "${1}+=\\${2}"\ +} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ +func_append_quoted ()\ +{\ +\ func_quote_for_eval "${2}"\ +\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ +} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? 
|| _lt_function_replace_fail=: + + + # Save a `func_append' function call where possible by direct use of '+=' + sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") + test 0 -eq $? || _lt_function_replace_fail=: +else + # Save a `func_append' function call even when '+=' is not available + sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") + test 0 -eq $? || _lt_function_replace_fail=: +fi + +if test x"$_lt_function_replace_fail" = x":"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 +$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} +fi + + + mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" + + ;; + + esac +done # for ac_tag + + +as_fn_exit 0 +_ACEOF +ac_clean_files=$ac_clean_files_save + +test $ac_write_fail = 0 || + as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 + + +# configure is writing to config.log, and then calls config.status. +# config.status does its own redirection, appending to config.log. +# Unfortunately, on DOS this fails, as config.log is still kept open +# by configure, so config.status won't be able to write to it; its +# output is simply discarded. So we exec the FD to /dev/null, +# effectively closing config.log, so it can be properly (re)opened and +# appended to by config.status. When coming back to configure, we +# need to make the FD available again. 
+if test "$no_create" != yes; then + ac_cs_success=: + ac_config_status_args= + test "$silent" = yes && + ac_config_status_args="$ac_config_status_args --quiet" + exec 5>/dev/null + $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false + exec 5>>config.log + # Use ||, not &&, to avoid exiting from the if with $? = 1, which + # would make configure fail if this is the last instruction. + $ac_cs_success || as_fn_exit 1 +fi +if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 +$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: summary of build options: + + version: ${LIBSSH2VER} + Host type: ${host} + Install prefix: ${prefix} + Compiler: ${CC} + Compiler flags: ${CFLAGS} + Library types: Shared=${enable_shared}, Static=${enable_static} + Crypto library: ${found_crypto} + Clear memory: $enable_clear_memory + Debug build: $enable_debug + Build examples: $build_examples + Path to sshd: $ac_cv_path_SSHD (only for self-tests) + zlib compression: $ac_cv_libz +" >&5 +$as_echo "$as_me: summary of build options: + + version: ${LIBSSH2VER} + Host type: ${host} + Install prefix: ${prefix} + Compiler: ${CC} + Compiler flags: ${CFLAGS} + Library types: Shared=${enable_shared}, Static=${enable_static} + Crypto library: ${found_crypto} + Clear memory: $enable_clear_memory + Debug build: $enable_debug + Build examples: $build_examples + Path to sshd: $ac_cv_path_SSHD (only for self-tests) + zlib compression: $ac_cv_libz +" >&6;} diff --git a/vendor/libssh2/configure.ac b/vendor/libssh2/configure.ac new file mode 100644 index 000000000..d6bdab4a9 --- /dev/null +++ b/vendor/libssh2/configure.ac @@ -0,0 +1,404 @@ +# AC_PREREQ(2.57) +AC_INIT(libssh2, [-], libssh2-devel@cool.haxx.se) +AC_CONFIG_MACRO_DIR([m4]) +AC_CONFIG_SRCDIR([src]) 
+AC_CONFIG_HEADERS([src/libssh2_config.h example/libssh2_config.h]) +AM_MAINTAINER_MODE +m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES([yes])]) + +dnl SED is needed by some of the tools +AC_PATH_PROG( SED, sed, sed-was-not-found-by-configure, + $PATH:/usr/bin:/usr/local/bin) +AC_SUBST(SED) + +if test "x$SED" = "xsed-was-not-found-by-configure"; then + AC_MSG_WARN([sed was not found, this may ruin your chances to build fine]) +fi + +dnl figure out the libssh2 version +LIBSSH2VER=`$SED -ne 's/^#define LIBSSH2_VERSION *"\(.*\)"/\1/p' ${srcdir}/include/libssh2.h` +AM_INIT_AUTOMAKE +AC_MSG_CHECKING([libssh2 version]) +AC_MSG_RESULT($LIBSSH2VER) + +AC_SUBST(LIBSSH2VER) + +AB_VERSION=$LIBSSH2VER + +AB_INIT + +# Check for the OS. +# Daniel's note: this should not be necessary and we need to work to +# get this removed. +AC_CANONICAL_HOST +case "$host" in + *-mingw*) + CFLAGS="$CFLAGS -DLIBSSH2_WIN32" + LIBS="$LIBS -lws2_32" + ;; + *-cygwin) + CFLAGS="$CFLAGS -DLIBSSH2_WIN32" + ;; + *darwin*) + CFLAGS="$CFLAGS -DLIBSSH2_DARWIN" + ;; + *hpux*) + ;; + *osf*) + CFLAGS="$CFLAGS -D_POSIX_PII_SOCKET" + ;; + *) + ;; +esac + +AC_CHECK_TYPE(long long, + [AC_DEFINE(HAVE_LONGLONG, 1, + [Define to 1 if the compiler supports the 'long long' data type.])] + longlong="yes" +) + +dnl Our configure and build reentrant settings +CURL_CONFIGURE_REENTRANT + +# Some systems (Solaris?) have socket() in -lsocket. +AC_SEARCH_LIBS(socket, socket) + +# Solaris has inet_addr() in -lnsl. 
+AC_SEARCH_LIBS(inet_addr, nsl) + +AC_SUBST(LIBS) + +AC_PROG_CC +AC_PROG_INSTALL +AC_PROG_LN_S +AC_PROG_MAKE_SET +AC_PATH_PROGS(SSHD, [sshd], [], + [$PATH$PATH_SEPARATOR/usr/libexec$PATH_SEPARATOR]dnl + [/usr/sbin$PATH_SEPARATOR/usr/etc$PATH_SEPARATOR/etc]) +AM_CONDITIONAL(SSHD, test -n "$SSHD") +AC_LIBTOOL_WIN32_DLL +AC_PROG_LIBTOOL +AC_C_BIGENDIAN + +dnl check for how to do large files +AC_SYS_LARGEFILE + +# Configure parameters +AC_ARG_WITH(openssl, + AC_HELP_STRING([--with-openssl],[Use OpenSSL for crypto]), + use_openssl=$withval,use_openssl=auto) +AC_ARG_WITH(libgcrypt, + AC_HELP_STRING([--with-libgcrypt],[Use libgcrypt for crypto]), + use_libgcrypt=$withval,use_libgcrypt=auto) +AC_ARG_WITH(wincng, + AC_HELP_STRING([--with-wincng],[Use Windows CNG for crypto]), + use_wincng=$withval,use_wincng=auto) +AC_ARG_WITH(libz, + AC_HELP_STRING([--with-libz],[Use zlib for compression]), + use_libz=$withval,use_libz=auto) + +found_crypto=none +support_clear_memory=no + +# Look for OpenSSL +if test "$found_crypto" = "none" && test "$use_openssl" != "no"; then + AC_LIB_HAVE_LINKFLAGS([ssl], [crypto], [#include ]) +fi +if test "$ac_cv_libssl" = "yes"; then + AC_DEFINE(LIBSSH2_OPENSSL, 1, [Use OpenSSL]) + LIBSREQUIRED=libssl,libcrypto + + # Not all OpenSSL have AES-CTR functions. + save_LIBS="$LIBS" + LIBS="$LIBS $LIBSSL" + AC_CHECK_FUNCS(EVP_aes_128_ctr) + LIBS="$save_LIBS" + + found_crypto="OpenSSL (AES-CTR: ${ac_cv_func_EVP_aes_128_ctr:-N/A})" +fi +AM_CONDITIONAL(OPENSSL, test "$ac_cv_libssl" = "yes") + +# Look for libgcrypt +if test "$found_crypto" = "none" && test "$use_libgcrypt" != "no"; then + AC_LIB_HAVE_LINKFLAGS([gcrypt], [], [#include ]) +fi +if test "$ac_cv_libgcrypt" = "yes"; then + AC_DEFINE(LIBSSH2_LIBGCRYPT, 1, [Use libgcrypt]) + LIBSREQUIRED= # libgcrypt doesn't provide a .pc file. sad face. 
+ LIBS="$LIBS -lgcrypt" + found_crypto=libgcrypt +fi +AM_CONDITIONAL(LIBGCRYPT, test "$ac_cv_libgcrypt" = "yes") + +# Look for Windows Cryptography API: Next Generation +if test "$found_crypto" = "none" && test "$use_wincng" != "no"; then + AC_LIB_HAVE_LINKFLAGS([bcrypt], [], [ + #include + #include + ]) + AC_LIB_HAVE_LINKFLAGS([crypt32], [], [ + #include + #include + ]) + AC_CHECK_HEADERS([ntdef.h ntstatus.h], [], [], [ + #include + ]) + AC_CHECK_DECLS([SecureZeroMemory], [], [], [ + #include + ]) +fi +if test "$ac_cv_libbcrypt" = "yes"; then + AC_DEFINE(LIBSSH2_WINCNG, 1, [Use Windows CNG]) + LIBSREQUIRED= # wincng doesn't provide a .pc file. sad face. + LIBS="$LIBS -lbcrypt" + if test "$ac_cv_libcrypt32" = "yes"; then + LIBS="$LIBS -lcrypt32" + fi + found_crypto="Windows Cryptography API: Next Generation" + if test "$ac_cv_have_decl_SecureZeroMemory" = "yes"; then + support_clear_memory=yes + fi +fi +AM_CONDITIONAL(WINCNG, test "$ac_cv_libbcrypt" = "yes") + +AM_CONDITIONAL(OS400QC3, false) + +# Check if crypto library was found +if test "$found_crypto" = "none"; then + AC_MSG_ERROR([No crypto library found! 
+Try --with-libssl-prefix=PATH + or --with-libgcrypt-prefix=PATH + or --with-wincng on Windows\ +]) +fi + +# Look for Libz +if test "$use_libz" != "no"; then + AC_LIB_HAVE_LINKFLAGS([z], [], [#include ]) + if test "$ac_cv_libz" != yes; then + AC_MSG_NOTICE([Cannot find zlib, disabling compression]) + AC_MSG_NOTICE([Try --with-libz-prefix=PATH if you know you have it]) + else + AC_DEFINE(LIBSSH2_HAVE_ZLIB, 1, [Compile in zlib support]) + if test "${LIBSREQUIRED}" != ""; then + LIBSREQUIRED="${LIBSREQUIRED}," + fi + LIBSREQUIRED="${LIBSREQUIRED}zlib" + fi +fi + +AC_SUBST(LIBSREQUIRED) + +# +# Optional Settings +# +AC_ARG_ENABLE(crypt-none, + AC_HELP_STRING([--enable-crypt-none],[Permit "none" cipher -- NOT RECOMMENDED]), + [AC_DEFINE(LIBSSH2_CRYPT_NONE, 1, [Enable "none" cipher -- NOT RECOMMENDED])]) + +AC_ARG_ENABLE(mac-none, + AC_HELP_STRING([--enable-mac-none],[Permit "none" MAC -- NOT RECOMMENDED]), + [AC_DEFINE(LIBSSH2_MAC_NONE, 1, [Enable "none" MAC -- NOT RECOMMENDED])]) + +AC_ARG_ENABLE(gex-new, + AC_HELP_STRING([--disable-gex-new],[Disable "new" diffie-hellman-group-exchange-sha1 method]), + [GEX_NEW=$enableval]) +if test "$GEX_NEW" != "no"; then + AC_DEFINE(LIBSSH2_DH_GEX_NEW, 1, [Enable newer diffie-hellman-group-exchange-sha1 syntax]) +fi + +AC_ARG_ENABLE(clear-memory, + AC_HELP_STRING([--disable-clear-memory],[Disable clearing of memory before being freed]), + [CLEAR_MEMORY=$enableval]) +if test "$CLEAR_MEMORY" != "no"; then + if test "$support_clear_memory" = "yes"; then + AC_DEFINE(LIBSSH2_CLEAR_MEMORY, 1, [Enable clearing of memory before being freed]) + enable_clear_memory=yes + else + if test "$CLEAR_MEMORY" = "yes"; then + AC_MSG_ERROR([secure clearing/zeroing of memory is not supported by the selected crypto backend]) + else + AC_MSG_WARN([secure clearing/zeroing of memory is not supported by the selected crypto backend]) + fi + enable_clear_memory=unsupported + fi +else + if test "$support_clear_memory" = "yes"; then + enable_clear_memory=no + 
else + AC_MSG_WARN([secure clearing/zeroing of memory is not supported by the selected crypto backend]) + enable_clear_memory=unsupported + fi +fi + +dnl ************************************************************ +dnl option to switch on compiler debug options +dnl +AC_MSG_CHECKING([whether to enable pedantic and debug compiler options]) +AC_ARG_ENABLE(debug, +AC_HELP_STRING([--enable-debug],[Enable pedantic and debug options]) +AC_HELP_STRING([--disable-debug],[Disable debug options]), +[ case "$enable_debug" in + no) + AC_MSG_RESULT(no) + ;; + *) AC_MSG_RESULT(yes) + enable_debug=yes + CPPFLAGS="$CPPFLAGS -DLIBSSH2DEBUG" + CFLAGS="$CFLAGS -g" + + dnl set compiler "debug" options to become more picky, and remove + dnl optimize options from CFLAGS + CURL_CC_DEBUG_OPTS + ;; + esac + ], + enable_debug=no + AC_MSG_RESULT(no) +) + +dnl ************************************************************ +dnl Enable hiding of internal symbols in library to reduce its size and +dnl speed dynamic linking of applications. This currently is only supported +dnl on gcc >= 4.0 and SunPro C. 
+dnl +AC_MSG_CHECKING([whether to enable hidden symbols in the library]) +AC_ARG_ENABLE(hidden-symbols, +AC_HELP_STRING([--enable-hidden-symbols],[Hide internal symbols in library]) +AC_HELP_STRING([--disable-hidden-symbols],[Leave all symbols with default visibility in library]), +[ case "$enableval" in + no) + AC_MSG_RESULT(no) + ;; + *) + AC_MSG_CHECKING([whether $CC supports it]) + if test "$GCC" = yes ; then + if $CC --help --verbose 2>&1 | grep fvisibility= > /dev/null ; then + AC_MSG_RESULT(yes) + AC_DEFINE(LIBSSH2_API, [__attribute__ ((visibility ("default")))], [to make a symbol visible]) + CFLAGS="$CFLAGS -fvisibility=hidden" + else + AC_MSG_RESULT(no) + fi + + else + dnl Test for SunPro cc + if $CC 2>&1 | grep flags >/dev/null && $CC -flags | grep xldscope= >/dev/null ; then + AC_MSG_RESULT(yes) + AC_DEFINE(LIBSSH2_API, [__global], [to make a symbol visible]) + CFLAGS="$CFLAGS -xldscope=hidden" + else + AC_MSG_RESULT(no) + fi + fi + ;; + esac ], + AC_MSG_RESULT(no) +) + +# Build example applications? +AC_MSG_CHECKING([whether to build example applications]) +AC_ARG_ENABLE([examples-build], +AC_HELP_STRING([--enable-examples-build], [Build example applications (this is the default)]) +AC_HELP_STRING([--disable-examples-build], [Do not build example applications]), +[case "$enableval" in + no | false) + build_examples='no' + ;; + *) + build_examples='yes' + ;; +esac], [build_examples='yes']) +AC_MSG_RESULT($build_examples) +AM_CONDITIONAL([BUILD_EXAMPLES], [test "x$build_examples" != "xno"]) + +# Checks for header files. 
+# AC_HEADER_STDC +AC_CHECK_HEADERS([errno.h fcntl.h stdio.h stdlib.h unistd.h sys/uio.h]) +AC_CHECK_HEADERS([sys/select.h sys/socket.h sys/ioctl.h sys/time.h]) +AC_CHECK_HEADERS([arpa/inet.h netinet/in.h]) +AC_CHECK_HEADERS([sys/un.h], [have_sys_un_h=yes], [have_sys_un_h=no]) +AM_CONDITIONAL([HAVE_SYS_UN_H], test "x$have_sys_un_h" = xyes) + +case $host in + *-*-cygwin* | *-*-cegcc*) + # These are POSIX-like systems using BSD-like sockets API. + ;; + *) + AC_CHECK_HEADERS([windows.h winsock2.h ws2tcpip.h]) + ;; +esac + +case $host in + *darwin*|*interix*) + dnl poll() does not work on these platforms + dnl Interix: "does provide poll(), but the implementing developer must + dnl have been in a bad mood, because poll() only works on the /proc + dnl filesystem here" + dnl Mac OS X's poll has funny behaviors, like: + dnl not being able to do poll on no fildescriptors (10.3?) + dnl not being able to poll on some files (like anything in /dev) + dnl not having reliable timeout support + dnl inconsistent return of POLLHUP where other implementations give POLLIN + AC_MSG_NOTICE([poll use is disabled on this platform]) + ;; + *) + AC_CHECK_FUNCS(poll) + ;; +esac + +AC_CHECK_FUNCS(gettimeofday select strtoll) + +dnl Check for select() into ws2_32 for Msys/Mingw +if test "$ac_cv_func_select" != "yes"; then + AC_MSG_CHECKING([for select in ws2_32]) + AC_TRY_LINK([ +#ifdef HAVE_WINSOCK2_H +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN +#endif +#include <winsock2.h> +#endif + ],[ + select(0,(fd_set *)NULL,(fd_set *)NULL,(fd_set *)NULL,(struct timeval *)NULL); + ],[ + AC_MSG_RESULT([yes]) + HAVE_SELECT="1" + AC_DEFINE_UNQUOTED(HAVE_SELECT, 1, + [Define to 1 if you have the select function.]) + ],[ + AC_MSG_RESULT([no]) + ]) +fi + +AC_FUNC_ALLOCA + +# Checks for typedefs, structures, and compiler characteristics.
+AC_C_CONST +AC_C_INLINE + +CURL_CHECK_NONBLOCKING_SOCKET + +AC_CONFIG_FILES([Makefile + src/Makefile + tests/Makefile + example/Makefile + docs/Makefile + libssh2.pc]) +AC_OUTPUT + +AC_MSG_NOTICE([summary of build options: + + version: ${LIBSSH2VER} + Host type: ${host} + Install prefix: ${prefix} + Compiler: ${CC} + Compiler flags: ${CFLAGS} + Library types: Shared=${enable_shared}, Static=${enable_static} + Crypto library: ${found_crypto} + Clear memory: $enable_clear_memory + Debug build: $enable_debug + Build examples: $build_examples + Path to sshd: $ac_cv_path_SSHD (only for self-tests) + zlib compression: $ac_cv_libz +]) diff --git a/vendor/libssh2/depcomp b/vendor/libssh2/depcomp new file mode 100755 index 000000000..04701da53 --- /dev/null +++ b/vendor/libssh2/depcomp @@ -0,0 +1,530 @@ +#! /bin/sh +# depcomp - compile a program generating dependencies as side-effects + +scriptversion=2005-07-09.11 + +# Copyright (C) 1999, 2000, 2003, 2004, 2005 Free Software Foundation, Inc. + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2, or (at your option) +# any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA +# 02110-1301, USA. + +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that program. 
+ +# Originally written by Alexandre Oliva <oliva@dcc.unicamp.br>. + +case $1 in + '') + echo "$0: No command. Try \`$0 --help' for more information." 1>&2 + exit 1; + ;; + -h | --h*) + cat <<\EOF +Usage: depcomp [--help] [--version] PROGRAM [ARGS] + +Run PROGRAMS ARGS to compile a file, generating dependencies +as side-effects. + +Environment variables: + depmode Dependency tracking mode. + source Source file read by `PROGRAMS ARGS'. + object Object file output by `PROGRAMS ARGS'. + DEPDIR directory where to store dependencies. + depfile Dependency file to output. + tmpdepfile Temporary file to use when outputing dependencies. + libtool Whether libtool is used (yes/no). + +Report bugs to <bug-automake@gnu.org>. +EOF + exit $? + ;; + -v | --v*) + echo "depcomp $scriptversion" + exit $? + ;; +esac + +if test -z "$depmode" || test -z "$source" || test -z "$object"; then + echo "depcomp: Variables source, object and depmode must be set" 1>&2 + exit 1 +fi + +# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po. +depfile=${depfile-`echo "$object" | + sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`} +tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`} + +rm -f "$tmpdepfile" + +# Some modes work just like other modes, but use different flags. We +# parameterize here, but still list the modes in the big case below, +# to make depend.m4 easier to write. Note that we *cannot* use a case +# here, because this file can only contain one case statement. +if test "$depmode" = hp; then + # HP compiler uses -M and no extra arg. + gccflag=-M + depmode=gcc +fi + +if test "$depmode" = dashXmstdout; then + # This is just like dashmstdout with a different argument. + dashmflag=-xM + depmode=dashmstdout +fi + +case "$depmode" in +gcc3) +## gcc 3 implements dependency tracking that does exactly what +## we want. Yay! Note: for some reason libtool 1.4 doesn't like +## it if -MD -MP comes after the -MF stuff. Hmm. + "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" + stat=$?
+ if test $stat -eq 0; then : + else + rm -f "$tmpdepfile" + exit $stat + fi + mv "$tmpdepfile" "$depfile" + ;; + +gcc) +## There are various ways to get dependency output from gcc. Here's +## why we pick this rather obscure method: +## - Don't want to use -MD because we'd like the dependencies to end +## up in a subdir. Having to rename by hand is ugly. +## (We might end up doing this anyway to support other compilers.) +## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like +## -MM, not -M (despite what the docs say). +## - Using -M directly means running the compiler twice (even worse +## than renaming). + if test -z "$gccflag"; then + gccflag=-MD, + fi + "$@" -Wp,"$gccflag$tmpdepfile" + stat=$? + if test $stat -eq 0; then : + else + rm -f "$tmpdepfile" + exit $stat + fi + rm -f "$depfile" + echo "$object : \\" > "$depfile" + alpha=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz +## The second -e expression handles DOS-style file names with drive letters. + sed -e 's/^[^:]*: / /' \ + -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile" +## This next piece of magic avoids the `deleted header file' problem. +## The problem is that when a header file which appears in a .P file +## is deleted, the dependency causes make to die (because there is +## typically no way to rebuild the header). We avoid this by adding +## dummy dependencies for each header file. Too bad gcc doesn't do +## this for us directly. + tr ' ' ' +' < "$tmpdepfile" | +## Some versions of gcc put a space before the `:'. On the theory +## that the space means something, we add a space to the output as +## well. +## Some versions of the HPUX 10.20 sed can't process this invocation +## correctly. Breaking it into two sed invocations is a workaround. + sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" + rm -f "$tmpdepfile" + ;; + +hp) + # This case exists only to let depend.m4 do its work. It works by + # looking at the text of this script. 
This case will never be run, + # since it is checked for above. + exit 1 + ;; + +sgi) + if test "$libtool" = yes; then + "$@" "-Wp,-MDupdate,$tmpdepfile" + else + "$@" -MDupdate "$tmpdepfile" + fi + stat=$? + if test $stat -eq 0; then : + else + rm -f "$tmpdepfile" + exit $stat + fi + rm -f "$depfile" + + if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files + echo "$object : \\" > "$depfile" + + # Clip off the initial element (the dependent). Don't try to be + # clever and replace this with sed code, as IRIX sed won't handle + # lines with more than a fixed number of characters (4096 in + # IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines; + # the IRIX cc adds comments like `#:fec' to the end of the + # dependency line. + tr ' ' ' +' < "$tmpdepfile" \ + | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' | \ + tr ' +' ' ' >> $depfile + echo >> $depfile + + # The second pass generates a dummy entry for each header file. + tr ' ' ' +' < "$tmpdepfile" \ + | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \ + >> $depfile + else + # The sourcefile does not contain any dependencies, so just + # store a dummy comment line, to avoid errors with the Makefile + # "include basename.Plo" scheme. + echo "#dummy" > "$depfile" + fi + rm -f "$tmpdepfile" + ;; + +aix) + # The C for AIX Compiler uses -M and outputs the dependencies + # in a .u file. In older versions, this file always lives in the + # current directory. Also, the AIX compiler puts `$object:' at the + # start of each line; $object doesn't have directory information. + # Version 6 uses the directory in both cases. + stripped=`echo "$object" | sed 's/\(.*\)\..*$/\1/'` + tmpdepfile="$stripped.u" + if test "$libtool" = yes; then + "$@" -Wc,-M + else + "$@" -M + fi + stat=$? 
+ + if test -f "$tmpdepfile"; then : + else + stripped=`echo "$stripped" | sed 's,^.*/,,'` + tmpdepfile="$stripped.u" + fi + + if test $stat -eq 0; then : + else + rm -f "$tmpdepfile" + exit $stat + fi + + if test -f "$tmpdepfile"; then + outname="$stripped.o" + # Each line is of the form `foo.o: dependent.h'. + # Do two passes, one to just change these to + # `$object: dependent.h' and one to simply `dependent.h:'. + sed -e "s,^$outname:,$object :," < "$tmpdepfile" > "$depfile" + sed -e "s,^$outname: \(.*\)$,\1:," < "$tmpdepfile" >> "$depfile" + else + # The sourcefile does not contain any dependencies, so just + # store a dummy comment line, to avoid errors with the Makefile + # "include basename.Plo" scheme. + echo "#dummy" > "$depfile" + fi + rm -f "$tmpdepfile" + ;; + +icc) + # Intel's C compiler understands `-MD -MF file'. However on + # icc -MD -MF foo.d -c -o sub/foo.o sub/foo.c + # ICC 7.0 will fill foo.d with something like + # foo.o: sub/foo.c + # foo.o: sub/foo.h + # which is wrong. We want: + # sub/foo.o: sub/foo.c + # sub/foo.o: sub/foo.h + # sub/foo.c: + # sub/foo.h: + # ICC 7.1 will output + # foo.o: sub/foo.c sub/foo.h + # and will wrap long lines using \ : + # foo.o: sub/foo.c ... \ + # sub/foo.h ... \ + # ... + + "$@" -MD -MF "$tmpdepfile" + stat=$? + if test $stat -eq 0; then : + else + rm -f "$tmpdepfile" + exit $stat + fi + rm -f "$depfile" + # Each line is of the form `foo.o: dependent.h', + # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'. + # Do two passes, one to just change these to + # `$object: dependent.h' and one to simply `dependent.h:'. + sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile" + # Some versions of the HPUX 10.20 sed can't process this invocation + # correctly. Breaking it into two sed invocations is a workaround. 
+ sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" | + sed -e 's/$/ :/' >> "$depfile" + rm -f "$tmpdepfile" + ;; + +tru64) + # The Tru64 compiler uses -MD to generate dependencies as a side + # effect. `cc -MD -o foo.o ...' puts the dependencies into `foo.o.d'. + # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put + # dependencies in `foo.d' instead, so we check for that too. + # Subdirectories are respected. + dir=`echo "$object" | sed -e 's|/[^/]*$|/|'` + test "x$dir" = "x$object" && dir= + base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'` + + if test "$libtool" = yes; then + # With Tru64 cc, shared objects can also be used to make a + # static library. This mecanism is used in libtool 1.4 series to + # handle both shared and static libraries in a single compilation. + # With libtool 1.4, dependencies were output in $dir.libs/$base.lo.d. + # + # With libtool 1.5 this exception was removed, and libtool now + # generates 2 separate objects for the 2 libraries. These two + # compilations output dependencies in in $dir.libs/$base.o.d and + # in $dir$base.o.d. We have to check for both files, because + # one of the two compilations can be disabled. We should prefer + # $dir$base.o.d over $dir.libs/$base.o.d because the latter is + # automatically cleaned when .libs/ is deleted, while ignoring + # the former would cause a distcleancheck panic. + tmpdepfile1=$dir.libs/$base.lo.d # libtool 1.4 + tmpdepfile2=$dir$base.o.d # libtool 1.5 + tmpdepfile3=$dir.libs/$base.o.d # libtool 1.5 + tmpdepfile4=$dir.libs/$base.d # Compaq CCC V6.2-504 + "$@" -Wc,-MD + else + tmpdepfile1=$dir$base.o.d + tmpdepfile2=$dir$base.d + tmpdepfile3=$dir$base.d + tmpdepfile4=$dir$base.d + "$@" -MD + fi + + stat=$? 
+ if test $stat -eq 0; then : + else + rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4" + exit $stat + fi + + for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4" + do + test -f "$tmpdepfile" && break + done + if test -f "$tmpdepfile"; then + sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile" + # That's a tab and a space in the []. + sed -e 's,^.*\.[a-z]*:[ ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile" + else + echo "#dummy" > "$depfile" + fi + rm -f "$tmpdepfile" + ;; + +#nosideeffect) + # This comment above is used by automake to tell side-effect + # dependency tracking mechanisms from slower ones. + +dashmstdout) + # Important note: in order to support this mode, a compiler *must* + # always write the preprocessed file to stdout, regardless of -o. + "$@" || exit $? + + # Remove the call to Libtool. + if test "$libtool" = yes; then + while test $1 != '--mode=compile'; do + shift + done + shift + fi + + # Remove `-o $object'. + IFS=" " + for arg + do + case $arg in + -o) + shift + ;; + $object) + shift + ;; + *) + set fnord "$@" "$arg" + shift # fnord + shift # $arg + ;; + esac + done + + test -z "$dashmflag" && dashmflag=-M + # Require at least two characters before searching for `:' + # in the target name. This is to cope with DOS-style filenames: + # a dependency such as `c:/foo/bar' could be seen as target `c' otherwise. + "$@" $dashmflag | + sed 's:^[ ]*[^: ][^:][^:]*\:[ ]*:'"$object"'\: :' > "$tmpdepfile" + rm -f "$depfile" + cat < "$tmpdepfile" > "$depfile" + tr ' ' ' +' < "$tmpdepfile" | \ +## Some versions of the HPUX 10.20 sed can't process this invocation +## correctly. Breaking it into two sed invocations is a workaround. + sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" + rm -f "$tmpdepfile" + ;; + +dashXmstdout) + # This case only exists to satisfy depend.m4. It is never actually + # run, as this mode is specially recognized in the preamble. 
+ exit 1 + ;; + +makedepend) + "$@" || exit $? + # Remove any Libtool call + if test "$libtool" = yes; then + while test $1 != '--mode=compile'; do + shift + done + shift + fi + # X makedepend + shift + cleared=no + for arg in "$@"; do + case $cleared in + no) + set ""; shift + cleared=yes ;; + esac + case "$arg" in + -D*|-I*) + set fnord "$@" "$arg"; shift ;; + # Strip any option that makedepend may not understand. Remove + # the object too, otherwise makedepend will parse it as a source file. + -*|$object) + ;; + *) + set fnord "$@" "$arg"; shift ;; + esac + done + obj_suffix="`echo $object | sed 's/^.*\././'`" + touch "$tmpdepfile" + ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@" + rm -f "$depfile" + cat < "$tmpdepfile" > "$depfile" + sed '1,2d' "$tmpdepfile" | tr ' ' ' +' | \ +## Some versions of the HPUX 10.20 sed can't process this invocation +## correctly. Breaking it into two sed invocations is a workaround. + sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" + rm -f "$tmpdepfile" "$tmpdepfile".bak + ;; + +cpp) + # Important note: in order to support this mode, a compiler *must* + # always write the preprocessed file to stdout. + "$@" || exit $? + + # Remove the call to Libtool. + if test "$libtool" = yes; then + while test $1 != '--mode=compile'; do + shift + done + shift + fi + + # Remove `-o $object'. 
+ IFS=" " + for arg + do + case $arg in + -o) + shift + ;; + $object) + shift + ;; + *) + set fnord "$@" "$arg" + shift # fnord + shift # $arg + ;; + esac + done + + "$@" -E | + sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ + -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' | + sed '$ s: \\$::' > "$tmpdepfile" + rm -f "$depfile" + echo "$object : \\" > "$depfile" + cat < "$tmpdepfile" >> "$depfile" + sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile" + rm -f "$tmpdepfile" + ;; + +msvisualcpp) + # Important note: in order to support this mode, a compiler *must* + # always write the preprocessed file to stdout, regardless of -o, + # because we must use -o when running libtool. + "$@" || exit $? + IFS=" " + for arg + do + case "$arg" in + "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI") + set fnord "$@" + shift + shift + ;; + *) + set fnord "$@" "$arg" + shift + shift + ;; + esac + done + "$@" -E | + sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::echo "`cygpath -u \\"\1\\"`":p' | sort | uniq > "$tmpdepfile" + rm -f "$depfile" + echo "$object : \\" > "$depfile" + . "$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s:: \1 \\:p' >> "$depfile" + echo " " >> "$depfile" + . "$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s::\1\::p' >> "$depfile" + rm -f "$tmpdepfile" + ;; + +none) + exec "$@" + ;; + +*) + echo "Unknown depmode $depmode" 1>&2 + exit 1 + ;; +esac + +exit 0 + +# Local Variables: +# mode: shell-script +# sh-indentation: 2 +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "scriptversion=" +# time-stamp-format: "%:y-%02m-%02d.%02H" +# time-stamp-end: "$" +# End: diff --git a/vendor/libssh2/docs/AUTHORS b/vendor/libssh2/docs/AUTHORS new file mode 100644 index 000000000..5c7445bf1 --- /dev/null +++ b/vendor/libssh2/docs/AUTHORS @@ -0,0 +1,79 @@ + libssh2 is the result of many friendly people. This list is an attempt to + mention all contributors. If we've missed anyone, tell us! + + This list of names is a-z sorted. 
+ +Adam Gobiowski +Alexander Holyapin +Alexander Lamaison +Alfred Gebert +Ben Kibbey +Bjorn Stenborg +Carlo Bramini +Cristian Rodríguez +Daiki Ueno +Dan Casey +Dan Fandrich +Daniel Stenberg +Dave Hayden +Dave McCaldon +David J Sullivan +David Robins +Dmitry Smirnov +Douglas Masterson +Edink Kadribasic +Erik Brossler +Francois Dupoux +Gellule Xg +Grubsky Grigory +Guenter Knauf +Heiner Steven +Henrik Nordstrom +James Housleys +Jasmeet Bagga +Jean-Louis Charton +Jernej Kovacic +Joey Degges +John Little +Jose Baars +Jussi Mononen +Kamil Dudka +Lars Nordin +Mark McPherson +Mark Smith +Markus Moeller +Matt Lilley +Matthew Booth +Maxime Larocque +Mike Protts +Mikhail Gusarov +Neil Gierman +Olivier Hervieu +Paul Howarth +Paul Querna +Paul Veldkamp +Peter Krempa +Peter O'Gorman +Peter Stuge +Pierre Joye +Rafael Kitover +Romain Bondue +Sara Golemon +Satish Mittal +Sean Peterson +Selcuk Gueney +Simon Hart +Simon Josefsson +Sofian Brabez +Steven Ayre +Steven Dake +Steven Van Ingelgem +TJ Saunders +Tommy Lindgren +Tor Arntsen +Vincent Jaulin +Vincent Torri +Vlad Grachov +Wez Furlong +Yang Tse +Zl Liu diff --git a/vendor/libssh2/docs/BINDINGS b/vendor/libssh2/docs/BINDINGS new file mode 100644 index 000000000..b97758fd9 --- /dev/null +++ b/vendor/libssh2/docs/BINDINGS @@ -0,0 +1,29 @@ + +Creative people have written bindings or interfaces for various environments +and programming languages. Using one of these bindings allows you to take +advantage of libssh2 directly from within your favourite language. + +The bindings listed below are not part of the libssh2 distribution archives, +but must be downloaded and installed separately. 
+ +Cocoa/Objective-C + https://github.com/karelia/libssh2_sftp-Cocoa-wrapper + +Haskell + FFI bindings - http://hackage.haskell.org/package/libssh2 + +Perl + Net::SSH2 - http://search.cpan.org/~rkitover/Net-SSH2-0.45/lib/Net/SSH2.pm + +PHP + ssh2 - http://pecl.php.net/package/ssh2 + +Python + pylibssh2 - http://www.wallix.org/pylibssh2-project/ + +Python-ctypes + + PySsh2 - https://github.com/gellule/PySsh2 + +Ruby + libssh2-ruby - https://github.com/mitchellh/libssh2-ruby diff --git a/vendor/libssh2/docs/CMakeLists.txt b/vendor/libssh2/docs/CMakeLists.txt new file mode 100644 index 000000000..3e9d165ef --- /dev/null +++ b/vendor/libssh2/docs/CMakeLists.txt @@ -0,0 +1,206 @@ +# Copyright (c) 2014 Alexander Lamaison +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided +# that the following conditions are met: +# +# Redistributions of source code must retain the above +# copyright notice, this list of conditions and the +# following disclaimer. +# +# Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials +# provided with the distribution. +# +# Neither the name of the copyright holder nor the names +# of any other contributors may be used to endorse or +# promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY +# OF SUCH DAMAGE. + +set(MAN_PAGES + libssh2_agent_connect.3 + libssh2_agent_disconnect.3 + libssh2_agent_free.3 + libssh2_agent_get_identity.3 + libssh2_agent_init.3 + libssh2_agent_list_identities.3 + libssh2_agent_userauth.3 + libssh2_banner_set.3 + libssh2_base64_decode.3 + libssh2_channel_close.3 + libssh2_channel_direct_tcpip.3 + libssh2_channel_direct_tcpip_ex.3 + libssh2_channel_eof.3 + libssh2_channel_exec.3 + libssh2_channel_flush.3 + libssh2_channel_flush_ex.3 + libssh2_channel_flush_stderr.3 + libssh2_channel_forward_accept.3 + libssh2_channel_forward_cancel.3 + libssh2_channel_forward_listen.3 + libssh2_channel_forward_listen_ex.3 + libssh2_channel_free.3 + libssh2_channel_get_exit_signal.3 + libssh2_channel_get_exit_status.3 + libssh2_channel_handle_extended_data.3 + libssh2_channel_handle_extended_data2.3 + libssh2_channel_ignore_extended_data.3 + libssh2_channel_open_ex.3 + libssh2_channel_open_session.3 + libssh2_channel_process_startup.3 + libssh2_channel_read.3 + libssh2_channel_read_ex.3 + libssh2_channel_read_stderr.3 + libssh2_channel_receive_window_adjust.3 + libssh2_channel_receive_window_adjust2.3 + libssh2_channel_request_pty.3 + libssh2_channel_request_pty_ex.3 + libssh2_channel_request_pty_size.3 + libssh2_channel_request_pty_size_ex.3 + libssh2_channel_send_eof.3 + libssh2_channel_set_blocking.3 + libssh2_channel_setenv.3 + libssh2_channel_setenv_ex.3 + libssh2_channel_shell.3 + libssh2_channel_subsystem.3 + 
libssh2_channel_wait_closed.3 + libssh2_channel_wait_eof.3 + libssh2_channel_window_read.3 + libssh2_channel_window_read_ex.3 + libssh2_channel_window_write.3 + libssh2_channel_window_write_ex.3 + libssh2_channel_write.3 + libssh2_channel_write_ex.3 + libssh2_channel_write_stderr.3 + libssh2_channel_x11_req.3 + libssh2_channel_x11_req_ex.3 + libssh2_exit.3 + libssh2_free.3 + libssh2_hostkey_hash.3 + libssh2_init.3 + libssh2_keepalive_config.3 + libssh2_keepalive_send.3 + libssh2_knownhost_add.3 + libssh2_knownhost_addc.3 + libssh2_knownhost_check.3 + libssh2_knownhost_checkp.3 + libssh2_knownhost_del.3 + libssh2_knownhost_free.3 + libssh2_knownhost_get.3 + libssh2_knownhost_init.3 + libssh2_knownhost_readfile.3 + libssh2_knownhost_readline.3 + libssh2_knownhost_writefile.3 + libssh2_knownhost_writeline.3 + libssh2_poll.3 + libssh2_poll_channel_read.3 + libssh2_publickey_add.3 + libssh2_publickey_add_ex.3 + libssh2_publickey_init.3 + libssh2_publickey_list_fetch.3 + libssh2_publickey_list_free.3 + libssh2_publickey_remove.3 + libssh2_publickey_remove_ex.3 + libssh2_publickey_shutdown.3 + libssh2_scp_recv.3 + libssh2_scp_recv2.3 + libssh2_scp_send.3 + libssh2_scp_send64.3 + libssh2_scp_send_ex.3 + libssh2_session_abstract.3 + libssh2_session_banner_get.3 + libssh2_session_banner_set.3 + libssh2_session_block_directions.3 + libssh2_session_callback_set.3 + libssh2_session_disconnect.3 + libssh2_session_disconnect_ex.3 + libssh2_session_flag.3 + libssh2_session_free.3 + libssh2_session_get_blocking.3 + libssh2_session_get_timeout.3 + libssh2_session_hostkey.3 + libssh2_session_init.3 + libssh2_session_init_ex.3 + libssh2_session_last_errno.3 + libssh2_session_last_error.3 + libssh2_session_set_last_error.3 + libssh2_session_method_pref.3 + libssh2_session_methods.3 + libssh2_session_set_blocking.3 + libssh2_session_set_timeout.3 + libssh2_session_startup.3 + libssh2_session_supported_algs.3 + libssh2_sftp_close.3 + libssh2_sftp_close_handle.3 + libssh2_sftp_closedir.3 
+ libssh2_sftp_fsetstat.3 + libssh2_sftp_fstat.3 + libssh2_sftp_fstat_ex.3 + libssh2_sftp_fstatvfs.3 + libssh2_sftp_fsync.3 + libssh2_sftp_get_channel.3 + libssh2_sftp_init.3 + libssh2_sftp_last_error.3 + libssh2_sftp_lstat.3 + libssh2_sftp_mkdir.3 + libssh2_sftp_mkdir_ex.3 + libssh2_sftp_open.3 + libssh2_sftp_open_ex.3 + libssh2_sftp_opendir.3 + libssh2_sftp_read.3 + libssh2_sftp_readdir.3 + libssh2_sftp_readdir_ex.3 + libssh2_sftp_readlink.3 + libssh2_sftp_realpath.3 + libssh2_sftp_rename.3 + libssh2_sftp_rename_ex.3 + libssh2_sftp_rewind.3 + libssh2_sftp_rmdir.3 + libssh2_sftp_rmdir_ex.3 + libssh2_sftp_seek.3 + libssh2_sftp_seek64.3 + libssh2_sftp_setstat.3 + libssh2_sftp_shutdown.3 + libssh2_sftp_stat.3 + libssh2_sftp_stat_ex.3 + libssh2_sftp_statvfs.3 + libssh2_sftp_symlink.3 + libssh2_sftp_symlink_ex.3 + libssh2_sftp_tell.3 + libssh2_sftp_tell64.3 + libssh2_sftp_unlink.3 + libssh2_sftp_unlink_ex.3 + libssh2_sftp_write.3 + libssh2_trace.3 + libssh2_trace_sethandler.3 + libssh2_userauth_authenticated.3 + libssh2_userauth_hostbased_fromfile.3 + libssh2_userauth_hostbased_fromfile_ex.3 + libssh2_userauth_keyboard_interactive.3 + libssh2_userauth_keyboard_interactive_ex.3 + libssh2_userauth_list.3 + libssh2_userauth_password.3 + libssh2_userauth_password_ex.3 + libssh2_userauth_publickey.3 + libssh2_userauth_publickey_fromfile.3 + libssh2_userauth_publickey_fromfile_ex.3 + libssh2_version.3) + +include(GNUInstallDirs) +install(FILES ${MAN_PAGES} DESTINATION ${CMAKE_INSTALL_MANDIR}/man3) diff --git a/vendor/libssh2/docs/HACKING b/vendor/libssh2/docs/HACKING new file mode 100644 index 000000000..5da8e66c2 --- /dev/null +++ b/vendor/libssh2/docs/HACKING @@ -0,0 +1,13 @@ + +libssh2 source code style guide: + + - 4 level indent + - spaces-only (no tabs) + - open braces on the if/for line: + + if (banana) { + go_nuts(); + } + + - keep source lines shorter than 80 columns + - See libssh2-style.el for how to achieve this within Emacs diff --git 
a/vendor/libssh2/docs/HACKING.CRYPTO b/vendor/libssh2/docs/HACKING.CRYPTO new file mode 100644 index 000000000..a8a6a0618 --- /dev/null +++ b/vendor/libssh2/docs/HACKING.CRYPTO @@ -0,0 +1,593 @@ + Definitions needed to implement a specific crypto library + +This document offers some hints about implementing a new crypto library +interface. + +A crypto library interface consists of at least a header file, defining +entities referenced from the libssh2 core modules. +Real code implementation (if needed), is left at the implementor's choice. + +This document lists the entities that must/may be defined in the header file. + +Procedures listed as "void" may indeed have a result type: the void indication +indicates the libssh2 core modules never use the function result. + + +1) Crypto library initialization/termination. + +void libssh2_crypto_init(void); +Initializes the crypto library. May be an empty macro if not needed. + +void libssh2_crypto_exit(void); +Terminates the crypto library use. May be an empty macro if not needed. + + +2) HMAC + +libssh2_hmac_ctx +Type of an HMAC computation context. Generally a struct. +Used for all hash algorithms. + +void libssh2_hmac_ctx_init(libssh2_hmac_ctx ctx); +Initializes the HMAC computation context ctx. +Called before setting-up the hash algorithm. +Note: if the ctx parameter is modified by the underlying code, +this procedure must be implemented as a macro to map ctx --> &ctx. + +void libssh2_hmac_update(libssh2_hmac_ctx ctx, + const unsigned char *data, + int datalen); +Continue computation of an HMAC on datalen bytes at data using context ctx. +Note: if the ctx parameter is modified by the underlying code, +this procedure must be implemented as a macro to map ctx --> &ctx. + +void libssh2_hmac_final(libssh2_hmac_ctx ctx, + unsigned char output[]); +Get the computed HMAC from context ctx into the output buffer. The +minimum data buffer size depends on the HMAC hash algorithm. 
+Note: if the ctx parameter is modified by the underlying code, +this procedure must be implemented as a macro to map ctx --> &ctx. + +void libssh2_hmac_cleanup(libssh2_hmac_ctx *ctx); +Releases the HMAC computation context at ctx. + + +3) Hash algorithms. + +3.1) SHA-1 +Must always be implemented. + +SHA_DIGEST_LENGTH +#define to 20, the SHA-1 digest length. + +libssh2_sha1_ctx +Type of an SHA1 computation context. Generally a struct. + +int libssh2_sha1_init(libssh2_sha1_ctx *x); +Initializes the SHA-1 computation context at x. +Returns 1 for success and 0 for failure + +void libssh2_sha1_update(libssh2_sha1_ctx ctx, + const unsigned char *data, + size_t len); +Continue computation of SHA-1 on len bytes at data using context ctx. +Note: if the ctx parameter is modified by the underlying code, +this procedure must be implemented as a macro to map ctx --> &ctx. + +void libssh2_sha1_final(libssh2_sha1_ctx ctx, + unsigned char output[SHA_DIGEST_LENGTH]); +Get the computed SHA-1 signature from context ctx and store it into the +output buffer. +Release the context. +Note: if the ctx parameter is modified by the underlying code, +this procedure must be implemented as a macro to map ctx --> &ctx. + +void libssh2_hmac_sha1_init(libssh2_hmac_ctx *ctx, + const void *key, + int keylen); +Setup the HMAC computation context ctx for an HMAC-SHA-1 computation using the +keylen-byte key. Is invoked just after libssh2_hmac_ctx_init(). + +3.2) SHA-256 +Must always be implemented. + +SHA256_DIGEST_LENGTH +#define to 32, the SHA-256 digest length. + +libssh2_sha256_ctx +Type of an SHA-256 computation context. Generally a struct. + +int libssh2_sha256_init(libssh2_sha256_ctx *x); +Initializes the SHA-256 computation context at x. +Returns 1 for success and 0 for failure + +void libssh2_sha256_update(libssh2_sha256_ctx ctx, + const unsigned char *data, + size_t len); +Continue computation of SHA-256 on len bytes at data using context ctx.
+Note: if the ctx parameter is modified by the underlying code, +this procedure must be implemented as a macro to map ctx --> &ctx. + +void libssh2_sha256_final(libssh2_sha256_ctx ctx, + unsigned char output[SHA256_DIGEST_LENGTH]); +Gets the computed SHA-256 signature from context ctx into the output buffer. +Release the context. +Note: if the ctx parameter is modified by the underlying code, +this procedure must be implemented as a macro to map ctx --> &ctx. + +int libssh2_sha256(const unsigned char *message, + unsigned long len, + unsigned char output[SHA256_DIGEST_LENGTH]); +Computes the SHA-256 signature over the given message of length len and +store the result into the output buffer. +Return 1 if error, else 0. +Note: Seems unused in current code, but defined in each crypto library backend. + +LIBSSH2_HMAC_SHA256 +#define as 1 if the crypto library supports HMAC-SHA-256, else 0. +If defined as 0, the rest of this section can be omitted. + +void libssh2_hmac_sha256_init(libssh2_hmac_ctx *ctx, + const void *key, + int keylen); +Setup the HMAC computation context ctx for an HMAC-256 computation using the +keylen-byte key. Is invoked just after libssh2_hmac_ctx_init(). + +3.3) SHA-512 +LIBSSH2_HMAC_SHA512 +#define as 1 if the crypto library supports HMAC-SHA-512, else 0. +If defined as 0, the rest of this section can be omitted. + +SHA512_DIGEST_LENGTH +#define to 64, the SHA-512 digest length. + +void libssh2_hmac_sha512_init(libssh2_hmac_ctx *ctx, + const void *key, + int keylen); +Setup the HMAC computation context ctx for an HMAC-512 computation using the +keylen-byte key. Is invoked just after libssh2_hmac_ctx_init(). + +3.4) MD5 +LIBSSH2_MD5 +#define to 1 if the crypto library supports MD5, else 0. +If defined as 0, the rest of this section can be omitted. + +MD5_DIGEST_LENGTH +#define to 16, the MD5 digest length. + +libssh2_md5_ctx +Type of an MD5 computation context. Generally a struct. 
+ +int libssh2_md5_init(libssh2_md5_ctx *x); +Initializes the MD5 computation context at x. +Returns 1 for success and 0 for failure + +void libssh2_md5_update(libssh2_md5_ctx ctx, + const unsigned char *data, + size_t len); +Continues computation of MD5 on len bytes at data using context ctx. +Returns 1 for success and 0 for failure. +Note: if the ctx parameter is modified by the underlying code, +this procedure must be implemented as a macro to map ctx --> &ctx. + +void libssh2_md5_final(libssh2_md5_ctx ctx, + unsigned char output[MD5_DIGEST_LENGTH]); +Gets the computed MD5 signature from context ctx into the output buffer. +Release the context. +Note: if the ctx parameter is modified by the underlying code, +this procedure must be implemented as a macro to map ctx --> &ctx. + +void libssh2_hmac_md5_init(libssh2_hmac_ctx *ctx, + const void *key, + int keylen); +Setup the HMAC computation context ctx for an HMAC-MD5 computation using the +keylen-byte key. Is invoked just after libssh2_hmac_ctx_init(). + +3.5) RIPEMD-160 +LIBSSH2_HMAC_RIPEMD +#define as 1 if the crypto library supports HMAC-RIPEMD-160, else 0. +If defined as 0, the rest of this section can be omitted. + +void libssh2_hmac_ripemd160_init(libssh2_hmac_ctx *ctx, + const void *key, + int keylen); +Setup the HMAC computation context ctx for an HMAC-RIPEMD-160 computation using +the keylen-byte key. Is invoked just after libssh2_hmac_ctx_init(). +Returns 1 for success and 0 for failure. + + +4) Bidirectional Key ciphers. + +_libssh2_cipher_ctx +Type of a cipher computation context. + +_libssh2_cipher_type(name); +Macro defining name as storage identifying a cipher algorithm for +the crypto library interface. No trailing semicolon. + +int _libssh2_cipher_init(_libssh2_cipher_ctx *h, + _libssh2_cipher_type(algo), + unsigned char *iv, + unsigned char *secret, + int encrypt); +Creates a cipher context for the given algorithm with the initialization vector +iv and the secret key secret. 
Prepare for encryption or decryption depending on +encrypt. +Return 0 if OK, else -1. +This procedure is already prototyped in crypto.h. + +int _libssh2_cipher_crypt(_libssh2_cipher_ctx *ctx, + _libssh2_cipher_type(algo), + int encrypt, + unsigned char *block, + size_t blocksize); +Encrypt or decrypt in-place data at (block, blocksize) using the given +context and/or algorithm. +Return 0 if OK, else -1. +This procedure is already prototyped in crypto.h. + +void _libssh2_cipher_dtor(_libssh2_cipher_ctx *ctx); +Release cipher context at ctx. + +4.1) AES +4.1.1) AES in CBC block mode. +LIBSSH2_AES +#define as 1 if the crypto library supports AES in CBC mode, else 0. +If defined as 0, the rest of this section can be omitted. + +_libssh2_cipher_aes128 +AES-128-CBC algorithm identifier initializer. +#define with constant value of type _libssh2_cipher_type(). + +_libssh2_cipher_aes192 +AES-192-CBC algorithm identifier initializer. +#define with constant value of type _libssh2_cipher_type(). + +_libssh2_cipher_aes256 +AES-256-CBC algorithm identifier initializer. +#define with constant value of type _libssh2_cipher_type(). + +4.1.2) AES in CTR block mode. +LIBSSH2_AES_CTR +#define as 1 if the crypto library supports AES in CTR mode, else 0. +If defined as 0, the rest of this section can be omitted. + +void _libssh2_init_aes_ctr(void); +Initialize static AES CTR ciphers. +This procedure is already prototyped in crypto.h. + +_libssh2_cipher_aes128ctr +AES-128-CTR algorithm identifier initializer. +#define with constant value of type _libssh2_cipher_type(). + +_libssh2_cipher_aes192ctr +AES-192-CTR algorithm identifier initializer. +#define with constant value of type _libssh2_cipher_type(). + +_libssh2_cipher_aes256ctr +AES-256-CTR algorithm identifier initializer. +#define with constant value of type _libssh2_cipher_type(). + +4.2) Blowfish in CBC block mode. +LIBSSH2_BLOWFISH +#define as 1 if the crypto library supports blowfish in CBC mode, else 0. 
+If defined as 0, the rest of this section can be omitted.
+
+_libssh2_cipher_blowfish
+Blowfish-CBC algorithm identifier initializer.
+#define with constant value of type _libssh2_cipher_type().
+
+4.3) RC4.
+LIBSSH2_RC4
+#define as 1 if the crypto library supports RC4 (arcfour), else 0.
+If defined as 0, the rest of this section can be omitted.
+
+_libssh2_cipher_arcfour
+RC4 algorithm identifier initializer.
+#define with constant value of type _libssh2_cipher_type().
+
+4.4) CAST5 in CBC block mode.
+LIBSSH2_CAST
+#define as 1 if the crypto library supports cast, else 0.
+If defined as 0, the rest of this section can be omitted.
+
+_libssh2_cipher_cast5
+CAST5-CBC algorithm identifier initializer.
+#define with constant value of type _libssh2_cipher_type().
+
+4.5) Triple DES in CBC block mode.
+LIBSSH2_3DES
+#define as 1 if the crypto library supports TripleDES in CBC mode, else 0.
+If defined as 0, the rest of this section can be omitted.
+
+_libssh2_cipher_3des
+TripleDES-CBC algorithm identifier initializer.
+#define with constant value of type _libssh2_cipher_type().
+
+
+5) Big numbers.
+Positive multi-byte integers support is sufficient.
+
+5.1) Computation contexts.
+This has a real meaning if the big numbers computations need some context
+storage. If not, use a dummy type and functions (macros).
+
+_libssh2_bn_ctx
+Type of multiple precision computation context. May not be empty. If not used,
+#define as char, for example.
+
+_libssh2_bn_ctx _libssh2_bn_ctx_new(void);
+Returns a new multiple precision computation context.
+
+void _libssh2_bn_ctx_free(_libssh2_bn_ctx ctx);
+Releases a multiple precision computation context.
+
+5.2) Computation support.
+_libssh2_bn
+Type of multiple precision numbers (aka bignumbers or huge integers) for the
+crypto library.
+
+_libssh2_bn * _libssh2_bn_init(void);
+Creates a multiple precision number (preset to zero). 
+
+_libssh2_bn * _libssh2_bn_init_from_bin(void);
+Create a multiple precision number intended to be set by the
+_libssh2_bn_from_bin() function (see below). Unlike _libssh2_bn_init(), this
+code may be a dummy initializer if the _libssh2_bn_from_bin() actually
+allocates the number. Returns a value of type _libssh2_bn *.
+
+void _libssh2_bn_free(_libssh2_bn *bn);
+Destroys the multiple precision number at bn.
+
+unsigned long _libssh2_bn_bytes(_libssh2_bn *bn);
+Get the number of bytes needed to store the bits of the multiple precision
+number at bn.
+
+unsigned long _libssh2_bn_bits(_libssh2_bn *bn);
+Returns the number of bits of multiple precision number at bn.
+
+int _libssh2_bn_set_word(_libssh2_bn *bn, unsigned long val);
+Sets the value of bn to val.
+Returns 1 on success, 0 otherwise.
+
+_libssh2_bn * _libssh2_bn_from_bin(_libssh2_bn *bn, int len,
+                                   const unsigned char *val);
+Converts the positive integer in big-endian form of length len at val
+into a _libssh2_bn and place it in bn. If bn is NULL, a new _libssh2_bn is
+created.
+Returns a pointer to target _libssh2_bn or NULL if error.
+
+int _libssh2_bn_to_bin(_libssh2_bn *bn, unsigned char *val);
+Converts the absolute value of bn into big-endian form and store it at
+val. val must point to _libssh2_bn_bytes(bn) bytes of memory.
+Returns the length of the big-endian number.
+
+void _libssh2_bn_rand(_libssh2_bn *bn, int bits, int top, int bottom);
+Generates a cryptographically strong pseudo-random number of bits in
+length and stores it in bn. If top is -1, the most significant bit of the
+random number can be zero. If top is 0, it is set to 1, and if top is 1, the
+two most significant bits of the number will be set to 1, so that the product
+of two such random numbers will always have 2*bits length. If bottom is true,
+the number will be odd. 
+ +void _libssh2_bn_mod_exp(_libssh2_bn *r, _libssh2_bn *a, + _libssh2_bn *p, _libssh2_bn *m, + _libssh2_bn_ctx *ctx); +Computes a to the p-th power modulo m and stores the result into r (r=a^p % m). +May use the given context. + + +6) Private key algorithms. +Format of an RSA public key: +a) "ssh-rsa". +b) RSA exponent, MSB first, with high order bit = 0. +c) RSA modulus, MSB first, with high order bit = 0. +Each item is preceded by its 32-bit byte length, MSB first. + +Format of a DSA public key: +a) "ssh-dss". +b) p, MSB first, with high order bit = 0. +c) q, MSB first, with high order bit = 0. +d) g, MSB first, with high order bit = 0. +e) pub_key, MSB first, with high order bit = 0. +Each item is preceded by its 32-bit byte length, MSB first. + +int _libssh2_pub_priv_keyfile(LIBSSH2_SESSION *session, + unsigned char **method, + size_t *method_len, + unsigned char **pubkeydata, + size_t *pubkeydata_len, + const char *privatekey, + const char *passphrase); +Reads a private key from file privatekey and extract the public key --> +(pubkeydata, pubkeydata_len). Store the associated method (ssh-rsa or ssh-dss) +into (method, method_len). +Both buffers have to be allocated using LIBSSH2_ALLOC(). +Returns 0 if OK, else -1. +This procedure is already prototyped in crypto.h. + +int _libssh2_pub_priv_keyfilememory(LIBSSH2_SESSION *session, + unsigned char **method, + size_t *method_len, + unsigned char **pubkeydata, + size_t *pubkeydata_len, + const char *privatekeydata, + size_t privatekeydata_len, + const char *passphrase); +Gets a private key from bytes at (privatekeydata, privatekeydata_len) and +extract the public key --> (pubkeydata, pubkeydata_len). Store the associated +method (ssh-rsa or ssh-dss) into (method, method_len). +Both buffers have to be allocated using LIBSSH2_ALLOC(). +Returns 0 if OK, else -1. +This procedure is already prototyped in crypto.h. + +6.1) RSA +LIBSSH2_RSA +#define as 1 if the crypto library supports RSA, else 0. 
+If defined as 0, the rest of this section can be omitted.
+
+libssh2_rsa_ctx
+Type of an RSA computation context. Generally a struct.
+
+int _libssh2_rsa_new(libssh2_rsa_ctx **rsa,
+                     const unsigned char *edata,
+                     unsigned long elen,
+                     const unsigned char *ndata,
+                     unsigned long nlen,
+                     const unsigned char *ddata,
+                     unsigned long dlen,
+                     const unsigned char *pdata,
+                     unsigned long plen,
+                     const unsigned char *qdata,
+                     unsigned long qlen,
+                     const unsigned char *e1data,
+                     unsigned long e1len,
+                     const unsigned char *e2data,
+                     unsigned long e2len,
+                     const unsigned char *coeffdata, unsigned long coefflen);
+Creates a new context for RSA computations from key source values:
+  pdata, plen         Prime number p. Only used if private key known (ddata).
+  qdata, qlen         Prime number q. Only used if private key known (ddata).
+  ndata, nlen         Modulus n.
+  edata, elen         Exponent e.
+  ddata, dlen         e^-1 % phi(n) = private key. May be NULL if unknown.
+  e1data, e1len       dp = d % (p-1). Only used if private key known (ddata).
+  e2data, e2len       dq = d % (q-1). Only used if private key known (ddata).
+  coeffdata, coefflen q^-1 % p. Only used if private key known.
+Returns 0 if OK.
+This procedure is already prototyped in crypto.h.
+Note: the current generic code only calls this function with e and n (public
+key parameters): unless used internally by the backend, it is not needed to
+support the private key and the other parameters here.
+
+int _libssh2_rsa_new_private(libssh2_rsa_ctx **rsa,
+                             LIBSSH2_SESSION *session,
+                             const char *filename,
+                             unsigned const char *passphrase);
+Reads an RSA private key from file filename into a new RSA context.
+Must call _libssh2_init_if_needed().
+Return 0 if OK, else -1.
+This procedure is already prototyped in crypto.h.
+
+int _libssh2_rsa_new_private_frommemory(libssh2_rsa_ctx **rsa,
+                                        LIBSSH2_SESSION *session,
+                                        const char *data,
+                                        size_t data_len,
+                                        unsigned const char *passphrase);
+Gets an RSA private key from data into a new RSA context. 
+Must call _libssh2_init_if_needed(). +Return 0 if OK, else -1. +This procedure is already prototyped in crypto.h. + +int _libssh2_rsa_sha1_verify(libssh2_rsa_ctx *rsa, + const unsigned char *sig, + unsigned long sig_len, + const unsigned char *m, unsigned long m_len); +Verify (sig, siglen) signature of (m, m_len) using an SHA-1 hash and the +RSA context. +Return 0 if OK, else -1. +This procedure is already prototyped in crypto.h. + +int _libssh2_rsa_sha1_signv(LIBSSH2_SESSION *session, + unsigned char **sig, size_t *siglen, + int count, const struct iovec vector[], + libssh2_rsa_ctx *ctx); +RSA signs the SHA-1 hash computed over the count data chunks in vector. +Signature is stored at (sig, siglen). +Signature buffer must be allocated from the given session. +Returns 0 if OK, else -1. +Note: this procedure is optional: if provided, it MUST be defined as a macro. + +int _libssh2_rsa_sha1_sign(LIBSSH2_SESSION *session, + libssh2_rsa_ctx *rsactx, + const unsigned char *hash, + size_t hash_len, + unsigned char **signature, + size_t *signature_len); +RSA signs the (hash, hashlen) SHA-1 hash bytes and stores the allocated +signature at (signature, signature_len). +Signature buffer must be allocated from the given session. +Returns 0 if OK, else -1. +This procedure is already prototyped in crypto.h. +Note: this procedure is not used if macro _libssh2_rsa_sha1_signv() is defined. + +void _libssh2_rsa_free(libssh2_rsa_ctx *rsactx); +Releases the RSA computation context at rsactx. + + +6.2) DSA +LIBSSH2_DSA +#define as 1 if the crypto library supports DSA, else 0. +If defined as 0, the rest of this section can be omitted. + + +libssh2_dsa_ctx +Type of a DSA computation context. Generally a struct. 
+ +int _libssh2_dsa_new(libssh2_dsa_ctx **dsa, + const unsigned char *pdata, + unsigned long plen, + const unsigned char *qdata, + unsigned long qlen, + const unsigned char *gdata, + unsigned long glen, + const unsigned char *ydata, + unsigned long ylen, + const unsigned char *x, unsigned long x_len); +Creates a new context for DSA computations from source key values: + pdata, plen Prime number p. Only used if private key known (ddata). + qdata, qlen Prime number q. Only used if private key known (ddata). + gdata, glen G number. + ydata, ylen Public key. + xdata, xlen Private key. Only taken if xlen non-zero. +Returns 0 if OK. +This procedure is already prototyped in crypto.h. + +int _libssh2_dsa_new_private(libssh2_dsa_ctx **dsa, + LIBSSH2_SESSION *session, + const char *filename, + unsigned const char *passphrase); +Gets a DSA private key from file filename into a new DSA context. +Must call _libssh2_init_if_needed(). +Return 0 if OK, else -1. +This procedure is already prototyped in crypto.h. + +int _libssh2_dsa_new_private_frommemory(libssh2_dsa_ctx **dsa, + LIBSSH2_SESSION *session, + const char *data, + size_t data_len, + unsigned const char *passphrase); +Gets a DSA private key from the data_len-bytes data into a new DSA context. +Must call _libssh2_init_if_needed(). +Returns 0 if OK, else -1. +This procedure is already prototyped in crypto.h. + +int _libssh2_dsa_sha1_verify(libssh2_dsa_ctx *dsactx, + const unsigned char *sig, + const unsigned char *m, unsigned long m_len); +Verify (sig, siglen) signature of (m, m_len) using an SHA1 hash and the +DSA context. +Returns 0 if OK, else -1. +This procedure is already prototyped in crypto.h. + +int _libssh2_dsa_sha1_sign(libssh2_dsa_ctx *dsactx, + const unsigned char *hash, + unsigned long hash_len, unsigned char *sig); +DSA signs the (hash, hash_len) data using SHA-1 and store the signature at sig. +Returns 0 if OK, else -1. +This procedure is already prototyped in crypto.h. 
+ +void _libssh2_dsa_free(libssh2_dsa_ctx *dsactx); +Releases the DSA computation context at dsactx. + + +7) Miscellaneous + +void libssh2_prepare_iovec(struct iovec *vector, unsigned int len); +Prepare len consecutive iovec slots before using them. +In example, this is needed to preset unused structure slacks on platforms +requiring it. +If this is not needed, it should be defined as an empty macro. + +void _libssh2_random(unsigned char *buf, int len); +Store len random bytes at buf. diff --git a/vendor/libssh2/docs/INSTALL_AUTOTOOLS b/vendor/libssh2/docs/INSTALL_AUTOTOOLS new file mode 100644 index 000000000..bc5a0eb25 --- /dev/null +++ b/vendor/libssh2/docs/INSTALL_AUTOTOOLS @@ -0,0 +1,325 @@ +Installation Instructions +************************* + +Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005 Free +Software Foundation, Inc. + +This file is free documentation; the Free Software Foundation gives +unlimited permission to copy, distribute and modify it. + +Basic Installation +================== + +These are generic installation instructions. + + The `configure' shell script attempts to guess correct values for +various system-dependent variables used during compilation. It uses +those values to create a `Makefile' in each directory of the package. +It may also create one or more `.h' files containing system-dependent +definitions. Finally, it creates a shell script `config.status' that +you can run in the future to recreate the current configuration, and a +file `config.log' containing compiler output (useful mainly for +debugging `configure'). + + It can also use an optional file (typically called `config.cache' +and enabled with `--cache-file=config.cache' or simply `-C') that saves +the results of its tests to speed up reconfiguring. (Caching is +disabled by default to prevent problems with accidental use of stale +cache files.) 
+ + If you need to do unusual things to compile the package, please try +to figure out how `configure' could check whether to do them, and mail +diffs or instructions to the address given in the `README' so they can +be considered for the next release. If you are using the cache, and at +some point `config.cache' contains results you don't want to keep, you +may remove or edit it. + + The file `configure.ac' (or `configure.in') is used to create +`configure' by a program called `autoconf'. You only need +`configure.ac' if you want to change it or regenerate `configure' using +a newer version of `autoconf'. + +The simplest way to compile this package is: + + 1. `cd' to the directory containing the package's source code and type + `./configure' to configure the package for your system. If you're + using `csh' on an old version of System V, you might need to type + `sh ./configure' instead to prevent `csh' from trying to execute + `configure' itself. + + Running `configure' takes awhile. While running, it prints some + messages telling which features it is checking for. + + 2. Type `make' to compile the package. + + 3. Optionally, type `make check' to run any self-tests that come with + the package. + + 4. Type `make install' to install the programs and any data files and + documentation. + + 5. You can remove the program binaries and object files from the + source code directory by typing `make clean'. To also remove the + files that `configure' created (so you can compile the package for + a different kind of computer), type `make distclean'. There is + also a `make maintainer-clean' target, but that is intended mainly + for the package's developers. If you use it, you may have to get + all sorts of other programs in order to regenerate files that came + with the distribution. + +Compilers and Options +===================== + +Some systems require unusual options for compilation or linking that the +`configure' script does not know about. 
Run `./configure --help' for +details on some of the pertinent environment variables. + + You can give `configure' initial values for configuration parameters +by setting variables in the command line or in the environment. Here +is an example: + + ./configure CC=c89 CFLAGS=-O2 LIBS=-lposix + + *Note Defining Variables::, for more details. + +Compiling For Multiple Architectures +==================================== + +You can compile the package for more than one kind of computer at the +same time, by placing the object files for each architecture in their +own directory. To do this, you must use a version of `make' that +supports the `VPATH' variable, such as GNU `make'. `cd' to the +directory where you want the object files and executables to go and run +the `configure' script. `configure' automatically checks for the +source code in the directory that `configure' is in and in `..'. + + If you have to use a `make' that does not support the `VPATH' +variable, you have to compile the package for one architecture at a +time in the source code directory. After you have installed the +package for one architecture, use `make distclean' before reconfiguring +for another architecture. + +Installation Names +================== + +By default, `make install' installs the package's commands under +`/usr/local/bin', include files under `/usr/local/include', etc. You +can specify an installation prefix other than `/usr/local' by giving +`configure' the option `--prefix=PREFIX'. + + You can specify separate installation prefixes for +architecture-specific files and architecture-independent files. If you +pass the option `--exec-prefix=PREFIX' to `configure', the package uses +PREFIX as the prefix for installing programs and libraries. +Documentation and other data files still use the regular prefix. + + In addition, if you use an unusual directory layout you can give +options like `--bindir=DIR' to specify different values for particular +kinds of files. 
Run `configure --help' for a list of the directories +you can set and what kinds of files go in them. + + If the package supports it, you can cause programs to be installed +with an extra prefix or suffix on their names by giving `configure' the +option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'. + +Optional Features +================= + +Some packages pay attention to `--enable-FEATURE' options to +`configure', where FEATURE indicates an optional part of the package. +They may also pay attention to `--with-PACKAGE' options, where PACKAGE +is something like `gnu-as' or `x' (for the X Window System). The +`README' should mention any `--enable-' and `--with-' options that the +package recognizes. + + For packages that use the X Window System, `configure' can usually +find the X include and library files automatically, but if it doesn't, +you can use the `configure' options `--x-includes=DIR' and +`--x-libraries=DIR' to specify their locations. + +Specifying the System Type +========================== + +There may be some features `configure' cannot figure out automatically, +but needs to determine by the type of machine the package will run on. +Usually, assuming the package is built to be run on the _same_ +architectures, `configure' can figure that out, but if it prints a +message saying it cannot guess the machine type, give it the +`--build=TYPE' option. TYPE can either be a short name for the system +type, such as `sun4', or a canonical name which has the form: + + CPU-COMPANY-SYSTEM + +where SYSTEM can have one of these forms: + + OS KERNEL-OS + + See the file `config.sub' for the possible values of each field. If +`config.sub' isn't included in this package, then this package doesn't +need to know the machine type. + + If you are _building_ compiler tools for cross-compiling, you should +use the option `--target=TYPE' to select the type of system they will +produce code for. 
+ + If you want to _use_ a cross compiler, that generates code for a +platform different from the build platform, you should specify the +"host" platform (i.e., that on which the generated programs will +eventually be run) with `--host=TYPE'. + +Sharing Defaults +================ + +If you want to set default values for `configure' scripts to share, you +can create a site shell script called `config.site' that gives default +values for variables like `CC', `cache_file', and `prefix'. +`configure' looks for `PREFIX/share/config.site' if it exists, then +`PREFIX/etc/config.site' if it exists. Or, you can set the +`CONFIG_SITE' environment variable to the location of the site script. +A warning: not all `configure' scripts look for a site script. + +Defining Variables +================== + +Variables not defined in a site shell script can be set in the +environment passed to `configure'. However, some packages may run +configure again during the build, and the customized values of these +variables may be lost. In order to avoid this problem, you should set +them in the `configure' command line, using `VAR=value'. For example: + + ./configure CC=/usr/local2/bin/gcc + +causes the specified `gcc' to be used as the C compiler (unless it is +overridden in the site shell script). Here is a another example: + + /bin/bash ./configure CONFIG_SHELL=/bin/bash + +Here the `CONFIG_SHELL=/bin/bash' operand causes subsequent +configuration-related scripts to be executed by `/bin/bash'. + +`configure' Invocation +====================== + +`configure' recognizes the following options to control how it operates. + +`--help' +`-h' + Print a summary of the options to `configure', and exit. + +`--version' +`-V' + Print the version of Autoconf used to generate the `configure' + script, and exit. + +`--cache-file=FILE' + Enable the cache: use and save the results of the tests in FILE, + traditionally `config.cache'. FILE defaults to `/dev/null' to + disable caching. 
+ +`--config-cache' +`-C' + Alias for `--cache-file=config.cache'. + +`--quiet' +`--silent' +`-q' + Do not print messages saying which checks are being made. To + suppress all normal output, redirect it to `/dev/null' (any error + messages will still be shown). + +`--srcdir=DIR' + Look for the package's source code in directory DIR. Usually + `configure' can determine that directory automatically. + +`configure' also accepts some other, not widely useful, options. Run +`configure --help' for more details. + +More configure options +====================== + +Some ./configure options deserve additional comments: + + * --enable-crypt-none + + The SSH2 Transport allows for unencrypted data + transmission using the "none" cipher. Because this is + such a huge security hole, it is typically disabled on + SSH2 implementations and is disabled in libssh2 by + default as well. + + Enabling this option will allow for "none" as a + negotiable method, however it still requires that the + method be advertized by the remote end and that no + more-preferable methods are available. + + * --enable-mac-none + + The SSH2 Transport also allows implementations to + forego a message authentication code. While this is + less of a security risk than using a "none" cipher, it + is still not recommended as disabling MAC hashes + removes a layer of security. + + Enabling this option will allow for "none" as a + negotiable method, however it still requires that the + method be advertized by the remote end and that no + more-preferable methods are available. + + * --disable-gex-new + + The diffie-hellman-group-exchange-sha1 (dh-gex) key + exchange method originally defined an exchange + negotiation using packet type 30 to request a + generation pair based on a single target value. Later + refinement of dh-gex provided for range and target + values. By default libssh2 will use the newer range + method. 
+ + If you experience trouble connecting to an old SSH + server using dh-gex, try this option to fallback on + the older more reliable method. + + * --with-libgcrypt + * --without-libgcrypt + * --with-libgcrypt-prefix=DIR + + libssh2 can use the Libgcrypt library + (http://www.gnupg.org/) for cryptographic operations. + Either Libgcrypt or OpenSSL is required. + + Configure will attempt to locate Libgcrypt + automatically. + + If your installation of Libgcrypt is in another + location, specify it using --with-libgcrypt-prefix. + + * --with-openssl + * --without-openssl + * --with-libssl-prefix=[DIR] + + libssh2 can use the OpenSSL library + (http://www.openssl.org) for cryptographic operations. + Either Libgcrypt or OpenSSL is required. + + Configure will attempt to locate OpenSSL in the + default location. + + If your installation of OpenSSL is in another + location, specify it using --with-libssl-prefix. + + * --with-libz + * --without-libz + * --with-libz-prefix=[DIR] + + If present, libssh2 will attempt to use the zlib + (http://www.zlib.org) for payload compression, however + zlib is not required. + + If your installation of Libz is in another location, + specify it using --with-libz-prefix. + + * --enable-debug + + Will make the build use more pedantic and strict compiler + options as well as enable the libssh2_trace() function (for + showing debug traces). 
diff --git a/vendor/libssh2/docs/INSTALL_CMAKE b/vendor/libssh2/docs/INSTALL_CMAKE new file mode 100644 index 000000000..704037059 --- /dev/null +++ b/vendor/libssh2/docs/INSTALL_CMAKE @@ -0,0 +1,174 @@ +License: see COPYING + +Source code: https://github.com/libssh2/libssh2 + +Web site source code: https://github.com/libssh2/www + +Installation instructions are in docs/INSTALL +======= +To build libssh2 you will need CMake v2.8 or later [1] and one of the +following cryptography libraries: + +* OpenSSL +* Libgcrypt +* WinCNG + +Getting started +--------------- + +If you are happy with the default options, make a new build directory, +change to it, configure the build environment and build the project: + + mkdir bin + cd bin + cmake .. + cmake --build . + +libssh2 will be built as a static library and will use any +cryptography library available. The library binary will be put in +`bin/src`, with the examples in `bin/example` and the tests in +`bin/tests`. + +Customising the build +--------------------- + +Of course, you might want to customise the build options. You can +pass the options to CMake on the command line: + + cmake -D