{"_id":"_design/app","_rev":"717-d90479cd4db1ef794aac5c340fbaa2e0","views":{"norevs":{"map":"function (doc) {\n if (doc._revisions && doc._revisions.ids.length === 1 &&\n doc._revisions.start > 3) {\n // we have a problem\n emit(doc._id, 1)\n }\n}","reduce":"_sum"},"mixedcase":{"map":"function (doc) { \n if (doc.name.toLowerCase() !== doc.name) {\n emit(doc._id, doc.author)\n }\n}"},"conflicts":{"map":"function (doc) {\n if (doc._conflicts) {\n for (var i = 0; i < doc._conflicts.length; i++) {\n emit([doc._id, doc._conflicts[i]], 1)\n }\n }\n}","reduce":"_sum"},"oddhost":{"map":"function (doc) {\n Object.keys = Object.keys || function keys (o) {\n var a = []\n for (var i in o) a.push(i)\n return a }\n Array.prototype.forEach = Array.prototype.forEach || function forEach (fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\n\n if (!doc.versions || Object.keys(doc.versions).length === 0)\n return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n Object.keys(doc.versions).forEach(function (v) {\n var ver = doc.versions[v]\n if (!ver.dist.tarball.match(/^https?:\\/\\/registry.npmjs.org\\//)) {\n emit([doc._id, ver._id, ver.dist.tarball], 1)\n }\n })\n}","reduce":"_sum"},"updated":{"map":"function (doc) {\n var l = doc[\"dist-tags\"].latest\n , t = doc.time && doc.time[l]\n if (t) emit(t, 1)\n}"},"listAll":{"map":"function (doc) { return emit(doc._id, doc) }"},"allVersions":{"map":"function (doc) {\n if (!doc || !doc.versions)\n return\n for (var i in doc.versions)\n emit([i, doc._id], 1)\n}","reduce":"_sum"},"modified":{"map":"function (doc) {\n function parse (s) {\n // s is something like \"2010-12-29T07:31:06Z\"\n s = s.split(\"T\")\n var ds = s[0]\n , ts = s[1]\n , d = new Date()\n ds = ds.split(\"-\")\n ts = ts.split(\":\")\n var tz = ts[2].substr(2)\n ts[2] = ts[2].substr(0, 2)\n d.setUTCFullYear(+ds[0])\n d.setUTCMonth(+ds[1]-1)\n 
d.setUTCDate(+ds[2])\n d.setUTCHours(+ts[0])\n d.setUTCMinutes(+ts[1])\n d.setUTCSeconds(+ts[2])\n d.setUTCMilliseconds(0)\n return d.getTime()\n }\n if (!doc.versions || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var latest = doc[\"dist-tags\"].latest\n if (!doc.versions[latest]) return\n var time = doc.time && doc.time[latest] || 0\n var t = new Date(parse(time))\n emit(t.getTime(), doc)\n}"},"modifiedPackage":{"map":"function (doc) {\n function parse (s) {\n // s is something like \"2010-12-29T07:31:06Z\"\n s = s.split(\"T\")\n var ds = s[0]\n , ts = s[1]\n , d = new Date()\n ds = ds.split(\"-\")\n ts = ts.split(\":\")\n var tz = ts[2].substr(2)\n ts[2] = ts[2].substr(0, 2)\n d.setUTCFullYear(+ds[0])\n d.setUTCMonth(+ds[1]-1)\n d.setUTCDate(+ds[2])\n d.setUTCHours(+ts[0])\n d.setUTCMinutes(+ts[1])\n d.setUTCSeconds(+ts[2])\n d.setUTCMilliseconds(0)\n return d.getTime()\n }\n if (!doc.versions || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var latest = doc[\"dist-tags\"].latest\n if (!doc.versions[latest]) return\n var time = doc.time && doc.time[latest] || 0\n var t = new Date(parse(time))\n emit([doc._id, t.getTime()], doc)\n}"},"noShasum":{"map":"function (doc) {\n if (!doc || !doc.versions)\n return\n\n for (var ver in doc.versions) {\n var version = doc.versions[ver]\n if (!version || !version.dist || !version.dist.shasum) {\n emit([doc.name, ver, !!version, !!version.dist, !!version.shasum], 1)\n }\n }\n}","reduce":"_sum"},"byEngine":{"map":"function (doc) {\n if (!doc || !doc.versions || !doc[\"dist-tags\"] || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var v = doc[\"dist-tags\"].latest\n var d = doc.versions[v]\n if (d && d.engines) emit(doc._id, [d.engines, doc.maintainers])\n 
}"},"countVersions":{"map":"function (doc) {\n if (!doc || !doc.name || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var i = 0\n if (!doc.versions) return emit([i, doc._id], 1)\n for (var v in doc.versions) i++\n emit([i, doc._id], 1)\n}","reduce":"_sum"},"byKeyword":{"map":"function (doc) {\n Array.isArray = Array.isArray || function isArray (a) {\n return a instanceof Array\n || Object.prototype.toString.call(a) === \"[object Array]\"\n || (typeof a === \"object\" && typeof a.length === \"number\") }\n Array.prototype.forEach = Array.prototype.forEach || function forEach (fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\n if (!doc || !doc.versions || !doc['dist-tags'] || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var v = doc.versions[doc['dist-tags'].latest]\n if (!v || !v.keywords || !Array.isArray(v.keywords)) return\n v.keywords.forEach(function (kw) {\n emit([kw.toLowerCase(), doc.name, doc.description], 1)\n })\n }","reduce":"_sum"},"byField":{"map":"function (doc) {\n Object.keys = Object.keys || function keys (o) {\n var a = []\n for (var i in o) a.push(i)\n return a }\n Array.prototype.forEach = Array.prototype.forEach || function forEach (fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\n\n if (!doc || !doc.versions || !doc[\"dist-tags\"]) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var v = doc[\"dist-tags\"].latest\n //Object.keys(doc.versions).forEach(function (v) {\n var d = doc.versions[v]\n if (!d) return\n //emit(d.name + \"@\" + d.version, d.dist.bin || {})\n var out = {}\n for (var i in d) {\n out[i] = d[i] //true\n if (d[i] && typeof d[i] === 
\"object\" &&\n (i === \"scripts\" || i === \"directories\")) {\n for (var j in d[i]) out[i + \".\" + j] = d[i][j]\n }\n }\n out.maintainers = doc.maintainers\n emit(doc._id, out)\n //})\n }"},"needBuild":{"map":"function (doc) {\n\n Object.keys = Object.keys || function keys (o) {\n var a = []\n for (var i in o) a.push(i)\n return a }\n Array.prototype.forEach = Array.prototype.forEach || function forEach (fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\n\n if (!doc || !doc.versions || !doc[\"dist-tags\"]) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var v = doc[\"dist-tags\"].latest\n //Object.keys(doc.versions).forEach(function (v) {\n var d = doc.versions[v]\n if (!d) return\n if (!d.scripts) return\n var inst = d.scripts.install\n || d.scripts.preinstall\n || d.scripts.postinstall\n if (!inst) return\n //emit(d.name + \"@\" + d.version, d.dist.bin || {})\n emit(d._id, d.dist.bin || {})\n //})\n }"},"scripts":{"map":"function (doc) {\n if (!doc || !doc.versions || !doc[\"dist-tags\"]) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var v = doc[\"dist-tags\"].latest\n v = doc.versions[v]\n if (!v || !v.scripts) return\n var out = {}\n var any = false\n for (var i in v.scripts) {\n out[i] = v.scripts[i]\n any = true\n }\n if (!any) return\n out.maintainers = doc.maintainers\n emit(doc._id, out)\n }"},"nodeWafInstall":{"map":"function (doc) {\n if (!doc || !doc.versions || !doc[\"dist-tags\"]) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var v = doc[\"dist-tags\"].latest\n if (!doc.versions[v]) return\n if (!doc.versions[v].scripts) return\n for (var i in doc.versions[v].scripts) {\n if (typeof doc.versions[v].scripts[i] === 'string' &&\n 
(doc.versions[v].scripts[i].indexOf(\"node-waf\") !== -1 ||\n doc.versions[v].scripts[i].indexOf(\"make\") !== -1)) {\n emit(doc._id, doc.versions[v]._id)\n return\n }\n }\n }"},"orphanAttachments":{"map":"function (doc) {\n if (!doc || !doc._attachments) return\n var orphans = []\n , size = 0\n for (var i in doc._attachments) {\n var n = i.substr(doc._id.length + 1).replace(/\\.tgz$/, \"\")\n .replace(/^v/, \"\")\n if (!doc.versions[n] && i.match(/\\.tgz$/)) {\n orphans.push(i)\n size += doc._attachments[i].length\n }\n }\n if (orphans.length) emit(doc._id, {size:size, orphans:orphans})\n }"},"noAttachment":{"map":"function (doc) {\n if (!doc || !doc._id) return\n var att = doc._attachments || {}\n var versions = doc.versions || {}\n var missing = []\n for (var i in versions) {\n var v = versions[i]\n if (!v.dist || !v.dist.tarball) {\n emit([doc._id, i, null], 1)\n continue\n }\n var f = v.dist.tarball.match(/([^\\/]+\\.tgz$)/)\n if (!f) {\n emit([doc._id, i, v.dist.tarball], 1)\n continue\n }\n f = f[1]\n if (!f || !att[f]) {\n emit([doc._id, i, v.dist.tarball], 1)\n continue\n }\n }\n }","reduce":"_sum"},"starredByUser":{"map":"function (doc) {\n Object.keys = Object.keys || function keys (o) {\n var a = []\n for (var i in o) a.push(i)\n return a }\n Array.prototype.forEach = Array.prototype.forEach || function forEach (fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\n\n if (!doc || !doc.users) return\n if (doc._id.match(/^npm-test-.+$/) && doc.maintainers[0].name === 'isaacs')\n return\n Object.keys(doc.users).forEach(function (m) {\n if (!doc.users[m]) return\n emit(m, doc._id)\n })\n}"},"starredByPackage":{"map":"function (doc) {\n Object.keys = Object.keys || function keys (o) {\n var a = []\n for (var i in o) a.push(i)\n return a }\n\n Array.prototype.forEach = Array.prototype.forEach || function forEach (fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if 
(this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\n\n if (!doc || !doc.users) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n Object.keys(doc.users).forEach(function (m) {\n if (!doc.users[m]) return\n emit(doc._id, m)\n })\n}"},"byUser":{"map":"function (doc) {\n if (!doc || !doc.maintainers) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n Array.prototype.forEach = Array.prototype.forEach || function forEach (fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\n doc.maintainers.forEach(function (m) {\n emit(m.name, doc._id)\n })\n}"},"browseAuthorsRecent":{"map":"function (doc) {\n Array.prototype.forEach = Array.prototype.forEach || function forEach (fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\n if (!doc || !doc.maintainers || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n l = l && doc.versions && doc.versions[l]\n if (!l || l.deprecated) return\n var t = doc.time && doc.time[l.version]\n if (!t) return\n var desc = doc.description || l.description || ''\n if (l._npmUser) {\n // emit the user who published most recently.\n var m = l._npmUser\n emit([t, m.name, doc._id, desc], 1)\n } else {\n // just emit all maintainers, since we don't know who published last\n doc.maintainers.forEach(function (m) {\n emit([t, m.name, doc._id, desc], 1)\n })\n }\n}","reduce":"_sum"},"npmTop":{"map":"function (doc) {\n Array.prototype.forEach = Array.prototype.forEach || function forEach (fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\n if (!doc || !doc.maintainers || 
doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n l = l && doc.versions && doc.versions[l]\n if (!l || l.deprecated) return\n var t = doc.time && doc.time[l.version]\n if (!t) return\n var desc = doc.description || l.description || ''\n doc.maintainers.forEach(function (m) {\n emit([m.name, doc._id, desc, t], 1)\n })\n}","reduce":"_sum"},"browseAuthors":{"map":"function (doc) {\n Array.prototype.forEach = Array.prototype.forEach || function forEach (fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\n if (!doc || !doc.maintainers || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n l = l && doc.versions && doc.versions[l]\n if (!l || l.deprecated) return\n var t = doc.time && doc.time[l.version]\n if (!t) return\n var desc = doc.description || l.description || ''\n doc.maintainers.forEach(function (m) {\n emit([m.name, doc._id, desc, t], 1)\n })\n}","reduce":"_sum"},"browseUpdated":{"map":"function (doc) {\n function parse (s) {\n // s is something like \"2010-12-29T07:31:06Z\"\n s = s.split(\"T\")\n var ds = s[0]\n , ts = s[1]\n , d = new Date()\n ds = ds.split(\"-\")\n ts = ts.split(\":\")\n var tz = ts[2].substr(2)\n ts[2] = ts[2].substr(0, 2)\n d.setUTCFullYear(+ds[0])\n d.setUTCMonth(+ds[1]-1)\n d.setUTCDate(+ds[2])\n d.setUTCHours(+ts[0])\n d.setUTCMinutes(+ts[1])\n d.setUTCSeconds(+ts[2])\n d.setUTCMilliseconds(0)\n return d.getTime()\n }\n\n if (!doc || !doc.versions || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l || l.deprecated) return\n var t = doc.time && 
doc.time[l]\n if (!t) return\n var v = doc.versions[l]\n if (!v) return\n var d = new Date(parse(t))\n if (!d.getTime()) return\n\n function pad(n){return n<10 ? '0'+n : n}\n Date.prototype.toISOString = Date.prototype.toISOString ||\n function toISOString(){\n var d = this;\n return d.getUTCFullYear()+'-'\n + pad(d.getUTCMonth()+1)+'-'\n + pad(d.getUTCDate())+'T'\n + pad(d.getUTCHours())+':'\n + pad(d.getUTCMinutes())+':'\n + pad(d.getUTCSeconds())+'Z'}\n\n emit([ d.toISOString(),\n doc._id,\n v.description ], 1)\n}","reduce":"_sum"},"browseAll":{"map":"function (doc) {\n if (!doc || !doc.versions || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l) return\n l = doc.versions && doc.versions[l]\n if (!l || l.deprecated) return\n var desc = doc.description || l.description || ''\n emit([doc.name, desc], 1)\n}","reduce":"_sum"},"analytics":{"map":"function (doc) {\n function parse (s) {\n // s is something like \"2010-12-29T07:31:06Z\"\n s = s.split(\"T\")\n var ds = s[0]\n , ts = s[1]\n , d = new Date()\n ds = ds.split(\"-\")\n ts = ts.split(\":\")\n var tz = ts[2].substr(2)\n ts[2] = ts[2].substr(0, 2)\n d.setUTCFullYear(+ds[0])\n d.setUTCMonth(+ds[1]-1)\n d.setUTCDate(+ds[2])\n d.setUTCHours(+ts[0])\n d.setUTCMinutes(+ts[1])\n d.setUTCSeconds(+ts[2])\n d.setUTCMilliseconds(0)\n return d.getTime()\n }\n if (!doc || !doc.time || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n for (var i in doc.time) {\n var t = doc.time[i]\n var d = new Date(parse(t))\n if (!d.getTime()) return\n var type = i === 'modified' ? 'latest'\n : i === 'created' ? 
'created'\n : 'update'\n emit([ type,\n d.getUTCFullYear(),\n d.getUTCMonth() + 1,\n d.getUTCDate(),\n doc._id ], 1)\n }\n}","reduce":"_sum"},"dependedUpon":{"map":"function (doc) {\n if (!doc || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l) return\n l = doc.versions && doc.versions[l]\n if (!l || l.deprecated) return\n var desc = doc.description || l.description || ''\n var d = l.dependencies\n if (!d) return\n for (var dep in d) {\n emit([dep, doc._id, desc], 1)\n }\n}","reduce":"_sum"},"dependentVersions":{"map":"function (doc) {\n if (!doc || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l) return\n l = doc.versions && doc.versions[l]\n if (!l || l.deprecated) return\n var deps = l.dependencies\n if (!deps) return\n for (var dep in deps)\n emit([dep, deps[dep], doc._id], 1)\n}","reduce":"_sum"},"browseStarUser":{"map":"function (doc) {\n if (!doc) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l) return\n l = doc.versions && doc.versions[l]\n if (!l || l.deprecated) return\n var desc = doc.description || l.description || ''\n var d = doc.users\n if (!d) return\n for (var user in d) {\n emit([user, doc._id, desc], 1)\n }\n}","reduce":"_sum"},"browseStarPackage":{"map":"function (doc) {\n if (!doc || doc.deprecated) return\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l) return\n l = doc.versions && doc.versions[l]\n if (!l || l.deprecated) return\n var desc = doc.description || l.description || ''\n var d = 
doc.users\n if (!d) return\n for (var user in d) {\n emit([doc._id, desc, user], 1)\n }\n}","reduce":"_sum"},"fieldsInUse":{"map":"function (doc) {\n if (!doc.versions || !doc[\"dist-tags\"] || !doc[\"dist-tags\"].latest || doc.deprecated) {\n return\n }\n if (doc._id.match(/^npm-test-.+$/) &&\n doc.maintainers &&\n doc.maintainers[0].name === 'isaacs')\n return\n var d = doc.versions[doc[\"dist-tags\"].latest]\n if (!d) return\n for (var f in d) {\n emit(f, 1)\n if (d[f] && typeof d[f] === \"object\" &&\n (f === \"scripts\" || f === \"directories\")) {\n for (var i in d[f]) emit(f+\".\"+i, 1)\n }\n }\n}","reduce":"_sum"}},"shows":{"notImplemented":"function (doc, req) {\n return {\n code: 501,\n headers: {\n \"content-type\": \"application/json\"\n },\n body: JSON.stringify({\n error: \"Not Implemented\",\n reason: \"This server does not support this endpoint\"\n })\n }\n}","whoami":"function (doc, req) {\n return {\n code: 200,\n headers: {\n \"content-type\": \"application/json\"\n },\n body: JSON.stringify({\n username: req.userCtx.name\n })\n }\n}","distTags":"function (doc, req) {\n return {\n code: 200,\n headers: {\n \"content-type\": \"application/json\"\n },\n body: JSON.stringify(doc[\"dist-tags\"])\n }\n}","package":"function (doc, req) {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n\n var semver = require(\"semver\")\n , code = 200\n , headers = {\"Content-Type\":\"application/json\"}\n , body = null\n\n delete doc.ctime\n delete doc.mtime\n if (!doc._attachments) doc._attachments = {}\n\n if (doc.time && doc.time.unpublished) {\n delete doc._revisions\n return {\n code : 404,\n body : JSON.stringify(doc),\n headers : headers\n }\n }\n\n if (doc.versions) Object.keys(doc.versions).forEach(function (v) {\n delete doc.versions[v].ctime\n delete doc.versions[v].mtime\n })\n\n // legacy kludge\n if (doc.versions) for (var v in doc.versions) {\n var clean = semver.clean(v, true)\n doc.versions[v].directories = doc.versions[v].directories 
|| {}\n if (clean !== v) {\n var p = doc.versions[v]\n delete doc.versions[v]\n p.version = v = clean\n p._id = p.name + '@' + p.version\n doc.versions[clean] = p\n }\n\n if (doc.versions[v].dist.tarball) {\n // if there is an attachment for this tarball, then use that.\n // make it point at THIS registry that is being requested,\n // with the full /db/_design/app/_rewrite if that is being used,\n // or just the /name if not.\n\n var t = doc.versions[v].dist.tarball\n t = t.replace(/^https?:\\/\\/[^\\/:]+(:[0-9]+)?/, '')\n var f = t.match(/[^\\/]+$/)[0]\n var requestedPath = req.requested_path\n if (doc._attachments && doc._attachments[f]) {\n // workaround for old couch versions that didn't\n // have requested_path\n if (requestedPath && -1 === requestedPath.indexOf('show'))\n requestedPath = requestedPath.slice(0)\n else {\n var path = req.path\n if (path) {\n var i = path.indexOf('_show')\n if (i !== -1) {\n requestedPath = path.slice(0)\n requestedPath.splice(i, i + 2, '_rewrite')\n }\n } else return {\n code : 500,\n body : JSON.stringify({error: 'bad couch'}),\n headers : headers\n }\n }\n\n // doc.versions[v].dist._origTarball = doc.versions[v].dist.tarball\n // doc.versions[v].dist._headers = req.headers\n // doc.versions[v].dist._query = req.query\n // doc.versions[v].dist._reqPath = req.requested_path\n // doc.versions[v].dist._path = req.path\n // doc.versions[v].dist._t = t.slice(0)\n\n // actual location of tarball should always be:\n // .../_rewrite/pkg/-/pkg-version.tgz\n // or: /pkg/-/pkg-version.tgz\n // depending on what requested path is.\n var tf = [doc.name, '-', t.split('/').pop()]\n var i = requestedPath.indexOf('_rewrite')\n if (i !== -1) {\n tf = requestedPath.slice(0, i + 1).concat(tf)\n }\n t = '/' + tf.join('/')\n var proto = req.headers['X-Forwarded-Proto'] || 'http';\n var h = proto + \"://\" + ( req.headers['X-Forwarded-Host'] || req.headers.Host );\n\n doc.versions[v].dist.tarball = h + t\n }\n }\n }\n if (doc[\"dist-tags\"]) for 
(var tag in doc[\"dist-tags\"]) {\n var clean = semver.clean(doc[\"dist-tags\"][tag], true)\n if (!clean) delete doc[\"dist-tags\"][tag]\n else doc[\"dist-tags\"][tag] = clean\n }\n // end kludge\n\n\n if (req.query.version) {\n // could be either one!\n // if it's a fuzzy version or a range, use the max satisfying version\n var ver = req.query.version\n var clean = semver.maxSatisfying(Object.keys(doc.versions), ver, true)\n\n if (clean && clean !== ver && (clean in doc.versions))\n ver = clean\n\n // if not a valid version, then treat as a tag.\n if ((!(ver in doc.versions) && (ver in doc[\"dist-tags\"]))\n || !semver.valid(ver)) {\n ver = doc[\"dist-tags\"][ver]\n }\n body = doc.versions[ver]\n if (!body) {\n code = 404\n body = {\"error\" : \"version not found: \"+req.query.version}\n }\n } else {\n body = doc\n delete body._revisions\n }\n\n body = req.query.jsonp\n ? req.query.jsonp + \"(\" + JSON.stringify(body) + \")\"\n : toJSON(body)\n\n return {\n code : code,\n body : body,\n headers : headers\n }\n}","new_package":"function (doc, req) {\n var semver = require(\"semver\")\n , code = 200\n , headers = {\"Content-Type\":\"application/json\"}\n , body = null\n\n if (!doc._attachments) doc._attachments = {}\n\n if (req.query.version) {\n // could be either one!\n // if it's a fuzzy version or a range, use the max satisfying version\n var ver = req.query.version\n var clean = semver.maxSatisfying(Object.keys(doc.versions), ver, true)\n\n if (clean && clean !== ver && (clean in doc.versions))\n ver = clean\n\n // if not a valid version, then treat as a tag.\n if ((!(ver in doc.versions) && (ver in doc[\"dist-tags\"]))\n || !semver.valid(ver)) {\n ver = doc[\"dist-tags\"][ver]\n }\n body = doc.versions[ver]\n if (!body) {\n code = 404\n body = {\"error\" : \"version not found: \"+req.query.version}\n }\n } else {\n body = doc\n }\n\n body = toJSON(body)\n\n return {\n code : code,\n body : body,\n headers : headers\n }\n}","ping":"function (doc, req) {\n var 
code = 200\n , headers = {\"Content-Type\":\"application/json\"}\n\n var body = {\n host: req.headers[\"Host\"]\n , ok: true\n , username: req.userCtx.name\n , peer: req.peer\n }\n\n body = toJSON(body)\n\n return {\n code : code\n , body : body\n , headers : headers\n }\n}"},"updates":{"distTags":"function (doc, req) {\n var dt = doc['dist-tags']\n var versions = doc.versions\n\n function error (message) {\n return [ { _id: \".error.\", forbidden: message },\n JSON.stringify({ error: message }) ]\n }\n\n function ok () {\n return [ doc, JSON.stringify({ ok: \"dist-tags updated\" }) ]\n }\n\n if (!dt || !versions)\n return error(\"bad document: no dist-tags or no versions\")\n\n if (req.body)\n var data = JSON.parse(req.body)\n\n var tag = req.query.tag\n switch (req.method) {\n case \"DELETE\":\n if (!tag)\n return error(\"tag param required\")\n delete dt[tag]\n return ok()\n\n case \"PUT\":\n case \"POST\":\n if (typeof data === \"string\") {\n if (!tag)\n return error(\"tag param required when setting single dist-tag\")\n dt[tag] = data\n\n } else if (data && typeof data === \"object\") {\n if (tag)\n return error(\"must not provide tag param when setting multiple dist-tags\")\n\n if (req.param === \"PUT\")\n doc[\"dist-tags\"] = data\n else for (var tag in data)\n dt[tag] = data[tag]\n\n } else {\n return error(\"unknown data type\")\n }\n return ok()\n default:\n return error(\"unknown request method: \" + req.method)\n }\n}","delete":"function (doc, req) {\n if (req.method !== \"DELETE\")\n return [ { _id: \".error.\", forbidden: \"Method not allowed\" },\n { error: \"method not allowed\" } ]\n\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n\n var dt = doc['dist-tags']\n var lv = dt && dt.latest\n var latest = lv && doc.versions && doc.versions[lv]\n\n var t = doc.time || {}\n t.unpublished = {\n name: req.userCtx.name,\n time: new Date().toISOString(),\n tags: dt || {},\n maintainers: doc.maintainers,\n description: latest && 
latest.description || doc.description,\n versions: Object.keys(doc.versions || {})\n }\n\n return [ {\n _id: doc._id,\n _rev: doc._rev,\n name: doc._id,\n time: t\n }, JSON.stringify({ ok: \"deleted\" }) ]\n}","metadata":"function (doc, req) {\n var data = JSON.parse(req.body)\n\n for (var i in data) {\n if (i !== '_rev' &&\n i !== 'maintainers' &&\n i !== 'versions' &&\n (typeof data[i] === 'string' || i === 'keywords'))\n doc[i] = data[i]\n }\n\n doc.time.modified = new Date().toISOString()\n\n return [doc, JSON.stringify({ok: \"updated metadata\" })]\n}","star":"function (doc, req) {\n var username = JSON.parse(req.body)\n\n if (!doc.users) doc.users = {}\n\n doc.users[username] = true\n\n return [doc, JSON.stringify({ok: username + ' has starred ' + doc.name})]\n}","unstar":"function (doc, req) {\n var username = JSON.parse(req.body)\n\n if (!doc.users) return [doc, JSON.stringify({ok: doc.name + ' has no users'})]\n\n delete doc.users[username]\n\n return [doc, JSON.stringify({ok: username + ' has unstarred ' + doc.name})]\n}","package":"function (doc, req) {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n var semver = require(\"semver\")\n var README_MAXLEN = 64 * 1024\n var body = JSON.parse(req.body)\n var deep = require(\"deep\")\n var deepEquals = deep.deepEquals\n var now = (new Date()).toISOString()\n\n\n // Sure would be nice if there was an easy way to toggle this in\n // couchdb somehow.\n var DEBUG = false\n if (typeof process === 'object' &&\n process &&\n process.env &&\n process.env === 'object')\n DEBUG = true\n\n var output = []\n var d\n if (typeof console === \"object\" &&\n typeof process === \"object\" &&\n typeof process.env === \"object\" &&\n /\\bvdu\\b/.test(process.env.NODE_DEBUG)) {\n d = console.error\n } else if (DEBUG)\n d = function() { output.push([].slice.apply(arguments)) }\n else\n d = function() {}\n\n if (!doc) {\n d('newDoc', body)\n return newDoc(body)\n } else if (req.query.version || req.query.tag) {\n 
d('legacyupdate', req.query)\n return legacyUpdate(doc, body, req.query)\n } else {\n d('updateDoc')\n return updateDoc(body, doc)\n }\n\n // unreachable\n return error(\"bug in update function. please report this.\")\n\n\n ////////////\n // methods\n\n function legacyUpdate(doc, body, query) {\n d('in legacyUpdate', body, query)\n // we know that there's already a document to merge into.\n // Figure out what we're trying to add into it.\n //\n // legacy npm clients would PUT the version to /:pkg/:version\n // tagging is done by PUT /:pkg/:tag with a \"version\" string\n if (typeof body === \"string\") {\n var tag = query.version\n var ver = body\n return addTag(tag, ver)\n }\n\n // adding a new version.\n return addNewVersion(body.version, body)\n }\n\n // return error(reason) to abort at any point.\n // the vdu will not allow this _id, and will throw\n // the \"forbidden\" value.\n function error (reason) {\n if (output.length) {\n reason += \"\\n\" + output.map(function(n) {\n return n.map(function(a) {\n return JSON.stringify(a)\n }).join(\" \")\n }).join(\"\\n\")\n }\n return [{\n _id: \".error.\",\n forbidden: reason\n }, JSON.stringify({\n forbidden: reason\n })]\n }\n\n // Copy relevant properties from the \"latest\" published version to root\n function latestCopy(doc) {\n d('latestCopy', doc['dist-tags'], doc)\n\n if (!doc['dist-tags'] || !doc.versions)\n return\n\n var copyFields = [\n \"description\",\n \"homepage\",\n \"keywords\",\n \"repository\",\n \"contributors\",\n \"author\",\n \"bugs\",\n \"license\"\n ]\n\n var latest = doc.versions &&\n doc['dist-tags'] &&\n doc.versions[doc[\"dist-tags\"].latest]\n if (latest && typeof latest === \"object\") {\n copyFields.forEach(function(k) {\n if (!latest[k])\n delete doc[k]\n else\n doc[k] = latest[k]\n })\n }\n }\n\n function descTrim(doc) {\n if (doc.description && doc.description.length > 255) {\n doc.description = doc.description.slice(0, 255)\n }\n if (doc.versions) {\n for (var v in doc.versions) 
{\n descTrim(doc.versions[v])\n }\n }\n }\n\n // Clean up excessive readmes and move to root of doc.\n function readmeTrim(doc) {\n var changed = false\n var readme = doc.readme || ''\n var readmeFilename = doc.readmeFilename || ''\n if (doc['dist-tags'] && doc['dist-tags'].latest) {\n var latest = doc.versions[doc['dist-tags'].latest]\n if (latest && latest.readme) {\n readme = latest.readme\n readmeFilename = latest.readmeFilename || ''\n }\n }\n\n for (var v in doc.versions) {\n // If we still don't have one, just take the first one.\n if (doc.versions[v].readme && !readme)\n readme = doc.versions[v].readme\n if (doc.versions[v].readmeFilename && !readmeFilename)\n readmeFilename = doc.versions[v].readmeFilename\n\n if (doc.versions[v].readme)\n changed = true\n\n delete doc.versions[v].readme\n delete doc.versions[v].readmeFilename\n }\n\n if (readme && readme.length > README_MAXLEN) {\n changed = true\n readme = readme.slice(0, README_MAXLEN)\n }\n doc.readme = readme\n doc.readmeFilename = readmeFilename\n\n return changed\n }\n\n // return ok(result, message) to exit successfully at any point.\n // Does some final data integrity cleanup stuff.\n function ok (doc, message) {\n // access is handled elsewhere, and should not be stored.\n delete doc.access\n delete doc.mtime\n delete doc.ctime\n var time = doc.time = doc.time || {}\n time.modified = now\n time.created = time.created || time.modified\n for (var v in doc.versions) {\n var ver = doc.versions[v]\n delete ver.ctime\n delete ver.mtime\n time[v] = time[v] || now\n }\n delete doc.time.unpublished\n\n findLatest(doc)\n latestCopy(doc)\n readmeTrim(doc)\n descTrim(doc)\n\n if (!doc.maintainers)\n return error(\"no maintainers. 
Please upgrade your npm client.\")\n\n if (output.length) {\n message += \"\\n\" + output.map(function(n) {\n return n.map(function(a) {\n return JSON.stringify(a)\n }).join(\" \")\n }).join(\"\\n\")\n }\n return [doc, JSON.stringify({ok:message})]\n }\n\n function findLatest(doc) {\n var tags = doc['dist-tags'] = doc['dist-tags'] || {}\n var versions = doc.versions = doc.versions || {}\n var lv = tags.latest\n var latest = versions[lv]\n if (latest)\n return\n\n // figure out what the \"latest\" tag should be\n var vers = Object.keys(versions)\n if (!vers.length) return\n\n vers = vers.sort(semver.compare)\n tags.latest = vers.pop()\n }\n\n // Create new package doc\n function newDoc (doc) {\n if (!doc._id) doc._id = doc.name\n if (!doc.versions) doc.versions = {}\n var latest\n for (var v in doc.versions) {\n if (!semver.valid(v, true))\n return error(\"Invalid version: \"+JSON.stringify(v))\n var p = doc.versions[v]\n latest = semver.clean(v, true)\n }\n if (!doc['dist-tags']) doc['dist-tags'] = {}\n\n if (latest && !doc['dist-tags'].latest) {\n doc[\"dist-tags\"].latest = latest\n }\n\n return ok(doc, \"created new entry\")\n }\n\n function addTag(tag, ver) {\n // tag\n if (!semver.valid(ver)) {\n return error(\"setting tag \"+tag+\" to invalid version: \"+ver)\n }\n if (!doc.versions || !doc.versions[ver]) {\n return error(\"setting tag \"+tag+\" to unknown version: \"+ver)\n }\n doc[\"dist-tags\"][tag] = semver.clean(ver, true)\n return ok(doc, \"updated tag\")\n }\n\n function addNewVersion(ver, body) {\n d('addNewVersion ver=', ver)\n if (typeof body !== \"object\" || !body) {\n return error(\"putting invalid object to version \"+req.query.version)\n }\n\n if (!semver.valid(ver, true)) {\n return error(\"invalid version: \"+ver)\n }\n\n if (doc.versions) {\n if ((ver in doc.versions) ||\n (semver.clean(ver, true) in doc.versions)) {\n // attempting to overwrite an existing version.\n // not allowed\n return error(\"cannot modify existing version\")\n }\n 
}\n\n if (body.name !== doc.name || body.name !== doc._id) {\n return error( \"Invalid name: \"+JSON.stringify(body.name))\n }\n\n body.version = semver.clean(body.version, true)\n ver = semver.clean(ver, true)\n if (body.version !== ver) {\n return error( \"version in doc doesn't match version in request: \"\n + JSON.stringify(body.version)\n + \" !== \" + JSON.stringify(ver) )\n }\n\n body._id = body.name + \"@\" + body.version\n d(\"set body.maintainers to doc.maintainers\", doc.maintainers)\n body.maintainers = doc.maintainers\n body._npmUser = body._npmUser || { name: req.userCtx.name }\n\n if (body.publishConfig && typeof body.publishConfig === 'object') {\n Object.keys(body.publishConfig).filter(function (k) {\n return k.match(/^_/)\n }).forEach(function (k) {\n delete body.publishConfig[k]\n })\n }\n\n var tag = req.query.tag\n || (body.publishConfig && body.publishConfig.tag)\n || body.tag\n || \"latest\"\n\n doc[\"dist-tags\"] = doc[\"dist-tags\"] || {}\n doc.versions = doc.versions || {}\n doc.time = doc.time || {}\n\n if (!req.query.pre)\n doc[\"dist-tags\"][tag] = body.version\n\n if (!doc[\"dist-tags\"].latest)\n doc[\"dist-tags\"].latest = body.version\n\n doc.versions[ver] = body\n doc.time = doc.time || {}\n doc.time[ver] = now\n\n return ok(doc, \"added version\")\n }\n\n function isError(res) {\n return res && res[0]._id === '.error.'\n }\n\n function mergeVersions(newdoc, doc) {\n if (!newdoc.versions)\n return\n\n // If we are passing in the _rev, then that means that the client has\n // fetched the current doc, and explicitly chosen to remove stuff\n // If they aren't passing in a matching _rev, then just merge in\n // new stuff, don't allow clobbering, and ignore missing versions.\n var revMatch = newdoc._rev === doc._rev\n\n if (!doc.versions) doc.versions = {}\n for (var v in newdoc.versions) {\n var nv = newdoc.versions[v]\n var ov = doc.versions[v]\n\n if (ov && !ov.directories &&\n JSON.stringify(nv.directories) === '{}') {\n delete 
nv.directories\n }\n\n if (!ov) {\n var vc = semver.clean(v, true)\n if (!vc || v !== vc)\n return error('Invalid version: ' + v)\n var res = addNewVersion(v, newdoc.versions[v])\n if (isError(res))\n return res\n } else if (nv.deprecated) {\n ov.deprecated = nv.deprecated\n } else if (!deepEquals(nv, ov, [[\"deprecated\"]])) {\n d('old=%j', ov)\n d('new=%j', nv)\n // Trying to change an existing version! Shenanigans!\n // XXX: we COULD just skip this version, and pretend\n // it worked, without actually updating. The vdu would\n // catch it anyway. Problem there is that then the user\n // doesn't see their stuff update, and wonders why.\n return error(\n 'cannot modify pre-existing version: ' + v + '\\n' +\n 'old=' + JSON.stringify(ov) + '\\n' +\n 'new=' + JSON.stringify(nv))\n } else if (ov.deprecated && !nv.deprecated) {\n delete ov.deprecated\n }\n\n }\n\n if (revMatch) {\n for (var v in doc.versions) {\n if (!newdoc.versions[v])\n delete doc.versions[v]\n }\n }\n }\n\n function mergeUsers(newdoc, doc) {\n // Note: it IS actually legal to just PUT {_id,users:{..}}\n // since it'll just merge it in.\n if (!newdoc.users)\n return\n\n if (!doc.users) doc.users = {}\n if (newdoc.users[req.userCtx.name])\n doc.users[req.userCtx.name] = newdoc.users[req.userCtx.name]\n else\n delete doc.users[req.userCtx.name]\n }\n\n function mergeAttachments(newdoc, doc) {\n if (!newdoc._attachments)\n return\n if (!doc._attachments) doc._attachments = {}\n var inline = false\n for(var k in newdoc._attachments) {\n if(newdoc._attachments[k].data) {\n doc._attachments[k] = newdoc._attachments[k]\n inline = true\n }\n }\n }\n\n function updateDoc(newdoc, doc) {\n if (doc.time && doc.time.unpublished) {\n d(\"previously unpublished\", doc.time.unpublished)\n newdoc._rev = doc._rev\n delete doc.time.unpublished\n }\n\n // Only allow maintainer update if the rev matches\n if (newdoc.maintainers && newdoc._rev === doc._rev) {\n d(\"set doc.maintainers to newdoc.maintainers\", 
newdoc.maintainers)\n doc.maintainers = newdoc.maintainers\n }\n\n // Don't copy over a dist-tags that is:\n // a) empty\n // b) not an object\n if (newdoc[\"dist-tags\"] && typeof newdoc[\"dist-tags\"] === \"object\") {\n var tags = Object.keys(newdoc[\"dist-tags\"])\n if (tags.length) {\n doc[\"dist-tags\"] = doc[\"dist-tags\"] || {}\n tags.forEach(function (t) {\n doc[\"dist-tags\"][t] = newdoc[\"dist-tags\"][t]\n })\n }\n // If the user sent us a single dist-tags entry, then treat it as\n // the effective \"?tag=foo\" param, for the purposes of updating.\n if (tags.length === 1) {\n if (!req.query.tag)\n req.query.tag = tags[0]\n }\n }\n\n // Don't update the readme if we're publishing an alpha/pre-release\n // only if it's a new default \"latest\" version\n if (!req.query.pre && (!req.query.tag || req.query.tag === \"latest\")) {\n for (var i in newdoc) {\n if (typeof newdoc[i] === \"string\") {\n doc[i] = newdoc[i]\n }\n }\n }\n\n\n var res = mergeVersions(newdoc, doc)\n if (isError(res))\n return res\n\n var res = mergeUsers(newdoc, doc)\n if (isError(res))\n return res\n\n var res = mergeAttachments(newdoc, doc)\n if (isError(res))\n return res\n\n return ok(doc, \"updated package\")\n 
}\n}"},"rewrites":[{"from":"/","to":"../..","method":"GET"},{"from":"/_session","to":"../../../_session","method":"GET"},{"from":"/_session","to":"../../../_session","method":"PUT"},{"from":"/_session","to":"../../../_session","method":"POST"},{"from":"/_session","to":"../../../_session","method":"DELETE"},{"from":"/_session","to":"../../../_session","method":"HEAD"},{"from":"/-/ping","to":"/_show/ping","method":"GET"},{"from":"/-/ping/*","to":"/_show/ping","method":"GET"},{"from":"/-/whoami","to":"/_show/whoami","method":"GET"},{"from":"/-/package/:pkg/access","to":"/_show/notImplemented"},{"from":"/-/package/:pkg/dist-tags","to":"/_show/distTags/:pkg","method":"GET"},{"from":"/-/package/:pkg/dist-tags/:tag","to":"/_update/distTags/:pkg","method":"DELETE"},{"from":"/-/package/:pkg/dist-tags/:tag","to":"/_update/distTags/:pkg","method":"PUT"},{"from":"/-/package/:pkg/dist-tags/:tag","to":"/_update/distTags/:pkg","method":"POST"},{"from":"/-/package/:pkg/dist-tags","to":"/_update/distTags/:pkg","method":"PUT"},{"from":"/-/package/:pkg/dist-tags","to":"/_update/distTags/:pkg","method":"POST"},{"from":"/-/all/since","to":"_list/index/modified","method":"GET"},{"from":"/-/rss","to":"_list/rss/modified","method":"GET"},{"from":"/-/rss/:package","to":"_list/rss/modifiedPackage","method":"GET"},{"from":"/-/all","to":"_list/index/listAll","method":"GET"},{"from":"/-/all/-/jsonp/:jsonp","to":"_list/index/listAll","method":"GET"},{"from":"/-/scripts","to":"_list/scripts/scripts","method":"GET"},{"from":"/-/by-field","to":"_list/byField/byField","method":"GET"},{"from":"/-/fields","to":"_list/sortCount/fieldsInUse","method":"GET","query":{"group":"true"}},{"from":"/-/needbuild","to":"_list/needBuild/needBuild","method":"GET"},{"from":"/favicon.ico","to":"../../npm/favicon.ico","method":"GET"},{"from":"/-/user/:user","to":"../../../_users/:user","method":"PUT"},{"from":"/-/user/:user/-rev/:rev","to":"../../../_users/:user","method":"PUT"},{"from":"/-/user/:user","to":"../../../
_users/:user","method":"GET"},{"from":"/_users/:user","to":"../../../_users/:user","method":"PUT"},{"from":"/_users/:user","to":"../../../_users/:user","method":"GET"},{"from":"/public_users/:user","to":"../../../public_users/:user","method":"PUT"},{"from":"/public_users/:user","to":"../../../public_users/:user","method":"GET"},{"from":"/-/user-by-email/:email","to":"../../../_users/_design/_auth/_list/email/listAll","method":"GET"},{"from":"/-/top","to":"_view/npmTop","query":{"group_level":1},"method":"GET"},{"from":"/-/by-user/:user","to":"_list/byUser/byUser","method":"GET"},{"from":"/-/starred-by-user/:user","to":"_list/byUser/starredByUser","method":"GET"},{"from":"/-/starred-by-package/:user","to":"_list/byUser/starredByPackage","method":"GET"},{"from":"/:pkg","to":"/_show/package/:pkg","method":"GET"},{"from":"/:pkg/-/jsonp/:jsonp","to":"/_show/package/:pkg","method":"GET"},{"from":"/:pkg/:version","to":"_show/package/:pkg","method":"GET"},{"from":"/:pkg/:version/-/jsonp/:jsonp","to":"_show/package/:pkg","method":"GET"},{"from":"/npm/public/registry/:firstletter/:pkg/_attachments/:att","to":"../../:pkg/:att","method":"GET"},{"from":"/npm/public/registry/:firstletter/:pkg/_attachments/:att/:rev","to":"../../:pkg/:att","method":"PUT"},{"from":"/npm/public/registry/:firstletter/:pkg/_attachments/:att/-rev/:rev","to":"../../:pkg/:att","method":"PUT"},{"from":"/npm/public/registry/:firstletter/:pkg/_attachments/:att/:rev","to":"../../:pkg/:att","method":"DELETE"},{"from":"/npm/public/registry/:firstletter/:pkg/_attachments/:att/-rev/:rev","to":"../../:pkg/:att","method":"DELETE"},{"from":"/npm/public/registry/:pkg/_attachments/:att","to":"../../:pkg/:att","method":"GET"},{"from":"/npm/public/registry/:pkg/_attachments/:att/:rev","to":"../../:pkg/:att","method":"PUT"},{"from":"/npm/public/registry/:pkg/_attachments/:att/-rev/:rev","to":"../../:pkg/:att","method":"PUT"},{"from":"/npm/public/registry/:pkg/_attachments/:att/:rev","to":"../../:pkg/:att","method":"DELE
TE"},{"from":"/npm/public/registry/:pkg/_attachments/:att/-rev/:rev","to":"../../:pkg/:att","method":"DELETE"},{"from":"/:pkg/-/:att","to":"../../:pkg/:att","method":"GET"},{"from":"/:pkg/-/:att/:rev","to":"../../:pkg/:att","method":"PUT"},{"from":"/:pkg/-/:att/-rev/:rev","to":"../../:pkg/:att","method":"PUT"},{"from":"/:pkg/-/:att/:rev","to":"../../:pkg/:att","method":"DELETE"},{"from":"/:pkg/-/:att/-rev/:rev","to":"../../:pkg/:att","method":"DELETE"},{"from":"/:pkg","to":"/_update/package/:pkg","method":"PUT"},{"from":"/:pkg/-rev/:rev","to":"/_update/package/:pkg","method":"PUT"},{"from":"/:pkg/:version","to":"_update/package/:pkg","method":"PUT"},{"from":"/:pkg/:version/-rev/:rev","to":"_update/package/:pkg","method":"PUT"},{"from":"/:pkg/:version/-tag/:tag","to":"_update/package/:pkg","method":"PUT"},{"from":"/:pkg/:version/-tag/:tag/-rev/:rev","to":"_update/package/:pkg","method":"PUT"},{"from":"/-metadata/:pkg","to":"_update/metadata/:pkg","method":"PUT"},{"from":"/:pkg/:version/-pre/:pre","to":"_update/package/:pkg","method":"PUT"},{"from":"/:pkg/:version/-pre/:pre/-rev/:rev","to":"_update/package/:pkg","method":"PUT"},{"from":"/:pkg/-rev/:rev","to":"_update/delete/:pkg","method":"DELETE"},{"from":"/-/_view/*","to":"_view/*","method":"GET"},{"from":"/-/_list/*","to":"_list/*","method":"GET"},{"from":"/-/_show/*","to":"_show/*","method":"GET"}],"lists":{"short":"function (head, req) {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n\n var out = {}\n , row\n , show = (req.query.show || \"\").split(\",\")\n , v = show.indexOf(\"version\") !== -1\n , t = show.indexOf(\"tag\") !== -1\n while (row = getRow()) {\n if (!row.id) continue\n if (!t && !v) {\n out[row.id] = true\n continue\n }\n var val = row.value\n if (t) Object.keys(val[\"dist-tags\"] || {}).forEach(function (t) {\n out[row.id + \"@\" + t] = true\n })\n if (v) Object.keys(val.versions || {}).forEach(function (v) {\n out[row.id + \"@\" + v] = true\n })\n }\n 
send(toJSON(Object.keys(out)))\n}","rss":"function (head, req) {\n function pad(n){return n<10 ? '0'+n : n}\n Date.prototype.toISOString = Date.prototype.toISOString ||\n function toISOString(){\n var d = this;\n return d.getUTCFullYear()+'-'\n + pad(d.getUTCMonth()+1)+'-'\n + pad(d.getUTCDate())+'T'\n + pad(d.getUTCHours())+':'\n + pad(d.getUTCMinutes())+':'\n + pad(d.getUTCSeconds())+'Z'}\n\n var limit = +req.query.limit\n , desc = req.query.descending\n if (!desc || !limit || limit > 50 || limit < 0) {\n start({ code: 403\n , headers: { 'Content-type': 'text/xml' }})\n send('')\n return\n }\n\n start({ code: 200\n // application/rss+xml is correcter, but also annoyinger\n , headers: { \"Content-Type\": \"text/xml\" } })\n send(''\n +'\\n'\n +'\\n'\n +'\\n '\n +'\\n npm recent updates'\n +'\\n http://search.npmjs.org/'\n +'\\n Updates to the npm package registry'\n +'\\n en')\n\n var row\n while (row = getRow()) {\n if (!row.value || !row.value[\"dist-tags\"]) continue\n\n var doc = row.value\n var authors = doc.maintainers.map(function (m) {\n return '' + m.name + ''\n }).join('\\n ')\n\n var latest = doc[\"dist-tags\"].latest\n var time = doc.time && doc.time[latest]\n var date = new Date(time)\n doc = doc.versions[latest]\n if (!doc || !time || !date) continue\n\n var url = \"https://npmjs.org/package/\" + doc.name\n\n send('\\n '\n +'\\n ' + doc._id + ''\n +'\\n ' + url + ''\n +'\\n ' + authors\n +'\\n '\n +'\\n ' + date.toISOString() + ''\n +'\\n ')\n }\n send('\\n '\n +'\\n')\n}","index":"function (head, req) {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n var basePath = req.requested_path\n if (basePath.indexOf(\"_list\") === -1) basePath = \"\"\n else {\n basePath = basePath.slice(0, basePath.indexOf(\"_list\"))\n .concat([\"_rewrite\", \"\"]).join(\"/\")\n }\n\n var row\n , semver = require(\"semver\")\n , res = []\n\n if (req.query.jsonp) send(req.query.jsonp + \"(\")\n send('{\"_updated\":' + Date.now())\n while (row = getRow()) {\n 
if (!row.id) continue\n\n var doc = row.value\n\n // We are intentionally not showing scoped modules in this list.\n // Since they may potentially be user-restricted, showing them\n // in the search endpoint leaks information. They get left out\n // by the fact that their _id is equal to the uri-encoded _id\n if (!doc.name || !doc._id ||\n encodeURIComponent(doc._id) !== doc._id) continue\n\n var p = {}\n\n // legacy kludge\n delete doc.mtime\n delete doc.ctime\n if (doc.versions) for (var v in doc.versions) {\n var clean = semver.clean(v)\n delete doc.versions[v].ctime\n delete doc.versions[v].mtime\n if (clean !== v) {\n var x = doc.versions[v]\n delete doc.versions[v]\n x.version = v = clean\n doc.versions[clean] = x\n }\n }\n if (doc[\"dist-tags\"]) for (var tag in doc[\"dist-tags\"]) {\n var clean = semver.clean(doc[\"dist-tags\"][tag])\n if (!clean) delete doc[\"dist-tags\"][tag]\n else doc[\"dist-tags\"][tag] = clean\n }\n // end kludge\n\n for (var i in doc) {\n if (i === \"versions\" || i.charAt(0) === \"_\" || i === 'readme' ||\n i === 'time') continue\n p[i] = doc[i]\n }\n if (doc.time) {\n p.time = { modified: doc.time.modified }\n }\n if (p['dist-tags'] && typeof p['dist-tags'] === 'object') {\n p.versions = Object.keys(p['dist-tags']).reduce(function (ac, v) {\n ac[ p['dist-tags'][v] ] = v\n return ac\n }, {})\n }\n if (doc.repositories && Array.isArray(doc.repositories)) {\n doc.repository = doc.repositories[0]\n delete doc.repositories\n }\n if (doc.repository) p.repository = doc.repository\n if (doc.description) p.description = doc.description\n for (var i in doc.versions) {\n if (doc.versions[i].repository && !doc.repository) {\n p.repository = doc.versions[i].repository\n }\n if (doc.versions[i].keywords) p.keywords = doc.versions[i].keywords\n }\n send(',' + JSON.stringify(doc._id) + ':' + JSON.stringify(p))\n }\n send('}')\n if (req.query.jsonp) send(')')\n\n}","byField":"function (head, req) {\n require(\"monkeypatch\").patch(Object, Date, 
Array, String)\n\n if (!req.query.field) {\n start({\"code\":\"400\", \"headers\": {\"Content-Type\": \"application/json\"}})\n send('{\"error\":\"Please specify a field parameter\"}')\n return\n }\n\n start({\"code\": 200, \"headers\": {\"Content-Type\": \"application/json\"}})\n var row\n , out = {}\n , field = req.query.field\n , not = field.charAt(0) === \"!\"\n if (not) field = field.substr(1)\n while (row = getRow()) {\n if (!row.id) continue\n var has = row.value.hasOwnProperty(field)\n if (!not && !has || not && has) continue\n out[row.key] = { \"maintainers\": row.value.maintainers.map(function (m) {\n return m.name + \" <\" + m.email + \">\"\n }) }\n if (has) out[row.key][field] = row.value[field]\n }\n send(JSON.stringify(out))\n}","needBuild":"function (head, req) {\n start({\"code\": 200, \"headers\": {\"Content-Type\": \"text/plain\"}});\n var row\n , first = true\n while (row = getRow()) {\n if (!row.id) continue\n if (req.query.bindist && row.value[req.query.bindist]) continue\n // out.push(row.key)\n send((first ? \"{\" : \",\")\n + JSON.stringify(row.key)\n + \":\"\n + JSON.stringify(Object.keys(row.value))\n + \"\\n\")\n first = false\n }\n send(\"}\\n\")\n}","scripts":"function (head, req) {\n var row\n , out = {}\n , scripts = req.query.scripts && req.query.scripts.split(\",\")\n , match = req.query.match\n\n if (match) match = new RegExp(match)\n\n while (row = getRow()) {\n inc = true\n if (!row.id) continue\n if (req.query.package && row.id !== req.query.package) continue\n if (scripts && scripts.length) {\n var inc = false\n for (var s = 0, l = scripts.length; s < l && !inc; s ++) {\n inc = row.value[scripts[s]]\n if (match) inc = inc && row.value[scripts[s]].match(match)\n }\n if (!inc) continue\n }\n out[row.id] = row.value\n }\n send(toJSON(out))\n}","byUser":"function (head, req) {\n var out = {}\n , user = req.query.user && req.query.user !== \"-\" ? 
req.query.user : null\n , users = user && user.split(\"|\")\n while (row = getRow()) {\n if (!user || users.indexOf(row.key) !== -1) {\n var l = out[row.key] = out[row.key] || []\n l.push(row.value)\n }\n }\n send(toJSON(out))\n}","sortCount":"function (head, req) {\n var out = []\n while (row = getRow()) {\n out.push([row.key, row.value])\n }\n out = out.sort(function (a, b) {\n return a[1] === b[1] ? 0\n : a[1] < b[1] ? 1 : -1\n })\n var outObj = {}\n for (var i = 0, l = out.length; i < l; i ++) {\n outObj[out[i][0]] = out[i][1]\n }\n send(toJSON(outObj))\n}"},"validate_doc_update":"function (doc, oldDoc, user, dbCtx) {\n var d\n if (typeof console === \"object\" &&\n typeof process === \"object\" &&\n typeof process.env === \"object\" &&\n /\\bvdu\\b/.test(process.env.NODE_DEBUG)) {\n d = console.error\n } else {\n d = function() {}\n }\n\n function assert (ok, message) {\n if (!ok) throw {forbidden:message}\n d(\"pass: \" + message)\n }\n\n // can't write to the db without logging in.\n if (!user || !user.name) {\n throw { forbidden: \"Please log in before writing to the db\" }\n }\n\n try {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n } catch (er) {\n assert(false, \"failed monkeypatching\")\n }\n\n try {\n var semver = require(\"semver\")\n var valid = require(\"valid\")\n var deep = require(\"deep\")\n var deepEquals = deep.deepEquals\n var scope = require(\"scope\")\n } catch (er) {\n assert(false, \"failed loading modules\")\n }\n\n function descTrim(doc) {\n if (doc.description && doc.description.length > 255) {\n doc.description = doc.description.slice(0, 255)\n }\n if (doc.versions) {\n for (var v in doc.versions) {\n descTrim(doc.versions[v])\n }\n }\n }\n\n // We always allow anyone to remove extraneous readme data,\n // and this is done in the process of starring or other non-publish\n // updates that go through the _update/package function anyway.\n // Just trim both, and go from that assumption.\n var README_MAXLEN = 64 * 1024\n 
function readmeTrim(doc) {\n var changed = false\n var readme = doc.readme || ''\n var readmeFilename = doc.readmeFilename || ''\n\n if (doc['dist-tags'] && doc['dist-tags'].latest) {\n var latest = doc.versions[doc['dist-tags'].latest]\n if (latest && latest.readme) {\n readme = latest.readme\n readmeFilename = latest.readmeFilename || ''\n }\n }\n\n for (var v in doc.versions) {\n // If we still don't have one, just take the first one.\n if (doc.versions[v].readme && !readme)\n readme = doc.versions[v].readme\n if (doc.versions[v].readmeFilename && !readmeFilename)\n readmeFilename = doc.versions[v].readmeFilename\n\n if (doc.versions[v].readme)\n changed = true\n\n delete doc.versions[v].readme\n delete doc.versions[v].readmeFilename\n }\n\n if (readme && readme.length > README_MAXLEN) {\n changed = true\n readme = readme.slice(0, README_MAXLEN)\n }\n doc.readme = readme\n doc.readmeFilename = readmeFilename\n\n return changed\n }\n\n // Copy relevant properties from the \"latest\" published version to root\n function latestCopy(doc) {\n if (!doc['dist-tags'] || !doc.versions)\n return\n\n var copyFields = [\n \"description\",\n \"homepage\",\n \"keywords\",\n \"repository\",\n \"contributors\",\n \"author\",\n \"bugs\",\n \"license\"\n ]\n\n var latest = doc.versions &&\n doc['dist-tags'] &&\n doc.versions[doc[\"dist-tags\"].latest]\n if (latest && typeof latest === \"object\") {\n copyFields.forEach(function(k) {\n if (!latest[k])\n delete doc[k]\n else\n doc[k] = latest[k]\n })\n }\n }\n\n function finishing(doc) {\n if (doc && doc.versions) {\n readmeTrim(doc)\n descTrim(doc)\n latestCopy(doc)\n }\n }\n\n\n finishing(doc)\n finishing(oldDoc)\n\n try {\n if (oldDoc) oldDoc.users = oldDoc.users || {}\n doc.users = doc.users || {}\n } catch (er) {\n assert(false, \"failed checking users\")\n }\n\n // you may not delete the npm document!\n if ((doc._deleted || (doc.time && doc.time.unpublished))\n && doc._id === \"npm\")\n throw { forbidden: \"you may not delete 
npm!\" }\n\n\n // if the doc is an {error:\"blerg\"}, then throw that right out.\n // something detected in the _updates/package script.\n // XXX: Make this not ever happen ever. Validation belongs here,\n // not in the update function.\n assert(!doc.forbidden, doc.forbidden)\n\n // admins can do ANYTHING (even break stuff)\n try {\n if (isAdmin()) return\n } catch (er) {\n assert(false, \"failed checking admin-ness\")\n }\n\n // figure out what changed in the doc.\n function diffObj (o, n, p) {\n p = p || \"\"\n var d = []\n var seenKeys = []\n\n for (var i in o) {\n seenKeys.push(i)\n if (n[i] === undefined) {\n d.push(\"Deleted: \"+p+i)\n }\n else if (typeof o[i] !== typeof n[i]) {\n d.push(\"Changed Type: \"+p+i)\n }\n else if (typeof o[i] === \"object\") {\n if (o[i]) {\n if (n[i]) {\n d = d.concat(diffObj(o[i], n[i], p + i + \".\"))\n } else {\n d.push(\"Nulled: \"+p+i)\n }\n } else {\n if (n[i]) {\n d.push(\"Un-nulled: \"+p+i)\n } else {\n // they're both null, and thus equal. do nothing.\n }\n }\n }\n // non-object, non-null\n else if (o[i] !== n[i]) {\n d.push(\"Changed: \"+p+i+\" \"+JSON.stringify(o[i]) + \" -> \"\n +JSON.stringify(n[i]))\n }\n }\n\n for (var i in n) {\n if (-1 === seenKeys.indexOf(i)) {\n d.push(\"Added: \"+p+i)\n }\n }\n return d\n }\n\n assert(!doc._deleted, \"deleting docs directly not allowed.\\n\" +\n \"Use the _update/delete method.\")\n\n assert(doc.name === doc._id, \"name must match _id\")\n assert(doc.name.length < 512, \"name is too long\")\n assert(!doc.mtime, \"doc.mtime is deprecated\")\n assert(!doc.ctime, \"doc.ctime is deprecated\")\n assert(typeof doc.time === \"object\", \"time must be object\")\n\n // everyone may alter his \"starred\" status on any package\n if (oldDoc &&\n !doc.time.unpublished &&\n deepEquals(doc, oldDoc,\n [[\"users\", user.name], [\"time\", \"modified\"]])) {\n if (doc.users && (user.name in doc.users)) {\n assert(typeof doc.users[user.name] === \"boolean\",\n \"star setting must be a boolean, 
got \" + (typeof doc.users[user.name]))\n }\n return\n }\n\n\n // check if the user is allowed to write to this package.\n function validUser () {\n // Admins can edit any packages\n if (isAdmin()) return true\n\n // scoped packages require that the user have the entity name as a role\n // They must ALSO be in the \"maintainers\" list by role or name.\n var roles = user && user.roles || []\n var s = scope.parse(doc.name)\n var entity = s[0]\n if (entity && roles.indexOf(entity) === -1) {\n return false\n }\n\n // At this point, they can publish if either the thing doesn't exist,\n // or they are one of the maintainers.\n // Unpublished packages don't have a \"maintainers\" property.\n if ( !oldDoc || !oldDoc.maintainers ) return true\n\n for (var i = 0, l = oldDoc.maintainers.length; i < l; i ++) {\n // In the maintainer list by name.\n if (oldDoc.maintainers[i].name === user.name) return true\n\n // in the maintainer list by role.\n var role = oldDoc.maintainers[i].role\n if (role && roles && typeof role === \"string\") {\n if (roles.indexOf(role) !== -1) return true\n }\n }\n\n // Not an owner, cannot publish.\n return false\n }\n\n function isAdmin () {\n if (dbCtx &&\n dbCtx.admins) {\n if (dbCtx.admins.names &&\n dbCtx.admins.roles &&\n Array.isArray(dbCtx.admins.names) &&\n dbCtx.admins.names.indexOf(user.name) !== -1) return true\n if (Array.isArray(dbCtx.admins.roles)) {\n for (var i = 0; i < user.roles.length; i++) {\n if (dbCtx.admins.roles.indexOf(user.roles[i]) !== -1) return true\n }\n }\n }\n return user && user.roles.indexOf(\"_admin\") >= 0\n }\n\n try {\n var vu = validUser()\n } catch (er) {\n assert(false, \"problem checking user validity\");\n }\n\n if (!vu) {\n assert(vu, \"user: \" + user.name + \" not authorized to modify \"\n + doc.name + \"\\n\"\n + diffObj(oldDoc, doc).join(\"\\n\"))\n }\n\n // unpublishing. 
no sense in checking versions\n if (doc.time.unpublished) {\n d(doc)\n assert(oldDoc, \"nothing to unpublish\")\n if (oldDoc.time)\n assert(!oldDoc.time.unpublished, \"already unpublished\")\n var name = user.name\n var unpublisher = doc.time.unpublished.name\n assert(name === unpublisher, name + \"!==\" + unpublisher)\n var k = []\n for (var i in doc)\n if (!i.match(/^_/)) k.push(i)\n k = k.sort().join(\",\")\n var e = \"name,time,users\"\n assert(k === e, \"must only have \" + e + \", has:\" + k)\n assert(JSON.stringify(doc.users) == '{}',\n 'must remove users when unpublishing')\n return\n }\n\n\n // Now we know that it is not an unpublish.\n assert(typeof doc['dist-tags'] === 'object', 'dist-tags must be object')\n // old crusty npm's would first PUT with dist-tags={} and versions={}\n // however, if we HAVE keys in versions, then dist-tags must also have\n // a \"latest\" key, and all dist-tags keys must point to extant versions\n var tags = Object.keys(doc['dist-tags'])\n var vers = Object.keys(doc.versions)\n if (vers.length > 0) {\n assert(tags.length > 0, 'may not remove dist-tags')\n assert(doc['dist-tags'].latest, 'must have a \"latest\" dist-tag')\n for (var i = 0; i < tags.length; i ++) {\n var tag = tags[i]\n assert(typeof doc['dist-tags'][tag] === 'string',\n 'dist-tags values must be strings')\n assert(doc.versions[doc['dist-tags'][tag]],\n 'tag points to invalid version: '+tag)\n }\n }\n\n // sanity checks.\n var s = scope.parse(doc.name)\n var entity = s[0]\n var name = s[1]\n assert(valid.name(name), \"name invalid: \"+name)\n\n // New documents may only be created with all lowercase names.\n // At some point, existing docs will be migrated to lowercase names\n // as well.\n if (!oldDoc && doc.name !== doc.name.toLowerCase()) {\n assert(false, \"New packages must have all-lowercase names\")\n }\n\n assert(typeof doc[\"dist-tags\"] === \"object\", \"dist-tags must be object\")\n\n var versions = doc.versions\n assert(typeof versions === 
\"object\", \"versions must be object\")\n\n var latest = doc[\"dist-tags\"].latest\n if (latest) {\n assert(versions[latest], \"dist-tags.latest must be valid version\")\n }\n\n // the 'latest' version must have a dist and shasum\n // I'd like to also require this of all past versions, but that\n // means going back and cleaning up about 2000 old package versions,\n // or else *new* versions of those packages can't be published.\n // Until that time, do this instead:\n var version = versions[latest]\n if (version) {\n assert(version.dist, \"no dist object in \" + latest + \" version\")\n assert(version.dist.tarball, \"no tarball in \" + latest + \" version\")\n assert(version.dist.shasum, \"no shasum in \" + latest + \" version\")\n }\n\n for (var v in doc[\"dist-tags\"]) {\n var ver = doc[\"dist-tags\"][v]\n assert(semver.valid(ver, true),\n v + \" version invalid version: \" + ver)\n assert(versions[ver],\n v + \" version missing: \" + ver)\n }\n\n var depCount = 0\n var maxDeps = 1000\n\n function checkDep(version, dep, t) {\n ridiculousDeps()\n if (!entity) {\n assert(scope.isGlobal(dep),\n \"global packages may only depend on other global packages\")\n }\n }\n\n function ridiculousDeps() {\n if (++depCount > maxDeps)\n assert(false, \"too many deps. 
please be less ridiculous.\")\n }\n\n for (var ver in versions) {\n var version = versions[ver]\n assert(semver.valid(ver, true),\n \"invalid version: \" + ver)\n assert(typeof version === \"object\",\n \"version entries must be objects\")\n assert(version.version === ver,\n \"version must match: \"+ver)\n assert(version.name === doc._id,\n \"version \"+ver+\" has incorrect name: \"+version.name)\n\n assert(version.version === ver,\n \"Version mismatch: \"+JSON.stringify(ver)+\n \" !== \"+JSON.stringify(version.version))\n\n depCount = 0\n var types =\n [\"dependencies\", \"devDependencies\", \"optionalDependencies\"]\n types.forEach(function(t) {\n for (var dep in version[t] || {}) {\n checkDep(version, dep, t)\n }\n })\n\n // NEW versions must only have strings in the 'scripts' field,\n // and versions that are strictly valid semver 2.0\n if (oldDoc && oldDoc.versions && !oldDoc.versions[ver]) {\n assert(semver.valid(ver), \"Invalid SemVer 2.0 version: \" + ver)\n\n if (version.hasOwnProperty('scripts')) {\n assert(version.scripts && typeof version.scripts === \"object\",\n \"'scripts' field must be an object\")\n for (var s in version.scripts) {\n assert(typeof version.scripts[s] === \"string\",\n \"Non-string script field: \" + s)\n }\n }\n }\n }\n\n assert(Array.isArray(doc.maintainers),\n \"maintainers should be a list of owners\")\n doc.maintainers.forEach(function (m) {\n assert(m.name && m.email,\n \"Maintainer should have name and email: \" + JSON.stringify(m))\n })\n\n var time = doc.time\n var c = new Date(Date.parse(time.created))\n , m = new Date(Date.parse(time.modified))\n assert(c.toString() !== \"Invalid Date\",\n \"invalid created time: \" + JSON.stringify(time.created))\n\n assert(m.toString() !== \"Invalid Date\",\n \"invalid modified time: \" + JSON.stringify(time.modified))\n\n if (oldDoc &&\n oldDoc.time &&\n oldDoc.time.created &&\n Date.parse(oldDoc.time.created)) {\n assert(Date.parse(oldDoc.time.created) === Date.parse(time.created),\n 
\"created time cannot be changed\")\n }\n\n if (oldDoc && oldDoc.users) {\n assert(deepEquals(doc.users,\n oldDoc.users, [[user.name]]),\n \"you may only alter your own 'star' setting\")\n }\n\n Object.keys(doc.users || {}).forEach(function(u) {\n d(\"doc.users[%j] = %j\", u, doc.users[u])\n assert(typeof doc.users[u] === 'boolean',\n 'star settings must be boolean values')\n })\n\n if (doc.url) {\n assert(false,\n \"Package redirection has been removed. \"+\n \"Please update your publish scripts.\")\n }\n\n if (doc.description) {\n assert(typeof doc.description === 'string',\n '\"description\" field must be a string')\n }\n\n var oldVersions = oldDoc ? oldDoc.versions || {} : {}\n var oldTime = oldDoc ? oldDoc.time || {} : {}\n\n var versions = Object.keys(doc.versions || {})\n , allowedChange = [[\"directories\"], [\"deprecated\"]]\n\n for (var i = 0, l = versions.length; i < l; i ++) {\n var v = versions[i]\n if (!v) continue\n assert(doc.time[v], \"must have time entry for \"+v)\n\n // new npm's \"fix\" the version\n // but that makes it look like it's been changed.\n if (doc && doc.versions[v] && oldDoc && oldVersions[v]) {\n doc.versions[v].version = oldVersions[v].version\n\n // *removing* a readme is fine, too\n if (!doc.versions[v].readme && oldVersions[v].readme)\n doc.versions[v].readme = oldVersions[v].readme\n }\n\n if (doc.versions[v] && oldDoc && oldVersions[v]) {\n // Pre-existing version\n assert(deepEquals(doc.versions[v], oldVersions[v], allowedChange),\n \"Changing published version metadata is not allowed\")\n } else {\n // New version\n assert(typeof doc.versions[v]._npmUser === \"object\",\n \"_npmUser must be object: \" + v)\n assert(doc.versions[v]._npmUser.name === user.name,\n \"_npmUser.name must match user.name: \" + v)\n }\n }\n\n // now go through all the time settings that weren't covered\n for (var v in oldTime) {\n if (v === \"modified\" || v === \"unpublished\") continue\n assert(doc.time[v] === oldTime[v],\n \"Attempting to 
modify version \" + v + \",\\n\" +\n \"which was previously published on \" + oldTime[v] + \".\\n\" +\n \"This is forbidden, to maintain package integrity.\\n\" +\n \"Please update the version number and try again.\")\n }\n\n\n // Do not allow creating a NEW attachment for a version that\n // already had an attachment in its metadata.\n // All this can do is corrupt things.\n // doc, oldDoc\n var newAtt = doc._attachments || {}\n var oldAtt = oldDoc && oldDoc._attachments || {}\n var oldVersions = oldDoc && oldDoc.versions\n for (var f in newAtt) {\n if (oldAtt[f]) {\n // Same bits are ok.\n assert(oldAtt[f].digest === newAtt[f].digest &&\n oldAtt[f].length === newAtt[f].length,\n \"Cannot replace existing tarball attachment\")\n } else {\n // see if any version was using that version already\n for (var v in oldVersions) {\n var ver = oldVersions[v]\n var tgz = ver.dist && ver.dist.tarball\n var m = tgz.match(/[^\\/]+$/)\n if (!m) {\n continue\n }\n var tf = m[0]\n assert(tf !== f, 'Cannot replace existing tarball attachment')\n }\n }\n }\n\n}","language":"javascript","deep":"exports.deepEquals = deepEquals\nexports.extend = deepExtend\nfunction deepExtend(o1, o2) {\n // extend o1 with o2 (in-place)\n for (var prop in o2) {\n if (hOP(o2, prop)) {\n if (hOP(o1, prop)) {\n if (typeof o1[prop] === \"object\") {\n deepExtend(o1[prop], o2[prop])\n }\n } else {\n o1[prop] = o2[prop]\n }\n }\n }\n return o1\n }\nfunction fullPath(pathPrefix, p){\n return pathPrefix.concat([p])\n }\nfunction isObject(v){\n return typeof v === 'object'\n }\nfunction arrayInArray(v, arr) {\n // Check whether `arr` contains an array that's shallowly equal to `v`.\n return arr.some(function(e) {\n if (e.length !== v.length) return false\n for (var i=0; i)?=?)';\n\n// Something like \"2.*\" or \"1.2.x\".\n// Note that \"x.x\" is a valid xRange identifer, meaning \"any version\"\n// Only the first item is strictly required.\nvar XRANGEIDENTIFIERLOOSE = R++;\nsrc[XRANGEIDENTIFIERLOOSE] = 
src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\\\*';\nvar XRANGEIDENTIFIER = R++;\nsrc[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\\\*';\n\nvar XRANGEPLAIN = R++;\nsrc[XRANGEPLAIN] = '[v=\\\\s]*(' + src[XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[XRANGEIDENTIFIER] + ')' +\n '(?:\\\\.(' + src[XRANGEIDENTIFIER] + ')' +\n '(?:' + src[PRERELEASE] + ')?' +\n src[BUILD] + '?' +\n ')?)?';\n\nvar XRANGEPLAINLOOSE = R++;\nsrc[XRANGEPLAINLOOSE] = '[v=\\\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:\\\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +\n '(?:' + src[PRERELEASELOOSE] + ')?' +\n src[BUILD] + '?' +\n ')?)?';\n\nvar XRANGE = R++;\nsrc[XRANGE] = '^' + src[GTLT] + '\\\\s*' + src[XRANGEPLAIN] + '$';\nvar XRANGELOOSE = R++;\nsrc[XRANGELOOSE] = '^' + src[GTLT] + '\\\\s*' + src[XRANGEPLAINLOOSE] + '$';\n\n// Tilde ranges.\n// Meaning is \"reasonably at or greater than\"\nvar LONETILDE = R++;\nsrc[LONETILDE] = '(?:~>?)';\n\nvar TILDETRIM = R++;\nsrc[TILDETRIM] = '(\\\\s*)' + src[LONETILDE] + '\\\\s+';\nre[TILDETRIM] = new RegExp(src[TILDETRIM], 'g');\nvar tildeTrimReplace = '$1~';\n\nvar TILDE = R++;\nsrc[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$';\nvar TILDELOOSE = R++;\nsrc[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$';\n\n// Caret ranges.\n// Meaning is \"at least and backwards compatible with\"\nvar LONECARET = R++;\nsrc[LONECARET] = '(?:\\\\^)';\n\nvar CARETTRIM = R++;\nsrc[CARETTRIM] = '(\\\\s*)' + src[LONECARET] + '\\\\s+';\nre[CARETTRIM] = new RegExp(src[CARETTRIM], 'g');\nvar caretTrimReplace = '$1^';\n\nvar CARET = R++;\nsrc[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$';\nvar CARETLOOSE = R++;\nsrc[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$';\n\n// A simple gt/lt/eq thing, or just \"\" to indicate \"any version\"\nvar COMPARATORLOOSE = R++;\nsrc[COMPARATORLOOSE] = '^' + src[GTLT] + '\\\\s*(' + LOOSEPLAIN + ')$|^$';\nvar COMPARATOR = 
R++;\nsrc[COMPARATOR] = '^' + src[GTLT] + '\\\\s*(' + FULLPLAIN + ')$|^$';\n\n\n// An expression to strip any whitespace between the gtlt and the thing\n// it modifies, so that `> 1.2.3` ==> `>1.2.3`\nvar COMPARATORTRIM = R++;\nsrc[COMPARATORTRIM] = '(\\\\s*)' + src[GTLT] +\n '\\\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')';\n\n// this one has to use the /g flag\nre[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g');\nvar comparatorTrimReplace = '$1$2$3';\n\n\n// Something like `1.2.3 - 1.2.4`\n// Note that these all use the loose form, because they'll be\n// checked against either the strict or loose comparator form\n// later.\nvar HYPHENRANGE = R++;\nsrc[HYPHENRANGE] = '^\\\\s*(' + src[XRANGEPLAIN] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[XRANGEPLAIN] + ')' +\n '\\\\s*$';\n\nvar HYPHENRANGELOOSE = R++;\nsrc[HYPHENRANGELOOSE] = '^\\\\s*(' + src[XRANGEPLAINLOOSE] + ')' +\n '\\\\s+-\\\\s+' +\n '(' + src[XRANGEPLAINLOOSE] + ')' +\n '\\\\s*$';\n\n// Star ranges basically just allow anything at all.\nvar STAR = R++;\nsrc[STAR] = '(<|>)?=?\\\\s*\\\\*';\n\n// Compile to actual regexp objects.\n// All are flag-free, unless they were created above with a flag.\nfor (var i = 0; i < R; i++) {\n debug(i, src[i]);\n if (!re[i])\n re[i] = new RegExp(src[i]);\n}\n\nexports.parse = parse;\nfunction parse(version, loose) {\n var r = loose ? re[LOOSE] : re[FULL];\n return (r.test(version)) ? new SemVer(version, loose) : null;\n}\n\nexports.valid = valid;\nfunction valid(version, loose) {\n var v = parse(version, loose);\n return v ? v.version : null;\n}\n\n\nexports.clean = clean;\nfunction clean(version, loose) {\n var s = parse(version.trim().replace(/^[=v]+/, ''), loose);\n return s ? 
s.version : null;\n}\n\nexports.SemVer = SemVer;\n\nfunction SemVer(version, loose) {\n if (version instanceof SemVer) {\n if (version.loose === loose)\n return version;\n else\n version = version.version;\n } else if (typeof version !== 'string') {\n throw new TypeError('Invalid Version: ' + version);\n }\n\n if (!(this instanceof SemVer))\n return new SemVer(version, loose);\n\n debug('SemVer', version, loose);\n this.loose = loose;\n var m = version.trim().match(loose ? re[LOOSE] : re[FULL]);\n\n if (!m)\n throw new TypeError('Invalid Version: ' + version);\n\n this.raw = version;\n\n // these are actually numbers\n this.major = +m[1];\n this.minor = +m[2];\n this.patch = +m[3];\n\n // numberify any prerelease numeric ids\n if (!m[4])\n this.prerelease = [];\n else\n this.prerelease = m[4].split('.').map(function(id) {\n return (/^[0-9]+$/.test(id)) ? +id : id;\n });\n\n this.build = m[5] ? m[5].split('.') : [];\n this.format();\n}\n\nSemVer.prototype.format = function() {\n this.version = this.major + '.' + this.minor + '.' 
+ this.patch;\n if (this.prerelease.length)\n this.version += '-' + this.prerelease.join('.');\n return this.version;\n};\n\nSemVer.prototype.inspect = function() {\n return '';\n};\n\nSemVer.prototype.toString = function() {\n return this.version;\n};\n\nSemVer.prototype.compare = function(other) {\n debug('SemVer.compare', this.version, this.loose, other);\n if (!(other instanceof SemVer))\n other = new SemVer(other, this.loose);\n\n return this.compareMain(other) || this.comparePre(other);\n};\n\nSemVer.prototype.compareMain = function(other) {\n if (!(other instanceof SemVer))\n other = new SemVer(other, this.loose);\n\n return compareIdentifiers(this.major, other.major) ||\n compareIdentifiers(this.minor, other.minor) ||\n compareIdentifiers(this.patch, other.patch);\n};\n\nSemVer.prototype.comparePre = function(other) {\n if (!(other instanceof SemVer))\n other = new SemVer(other, this.loose);\n\n // NOT having a prerelease is > having one\n if (this.prerelease.length && !other.prerelease.length)\n return -1;\n else if (!this.prerelease.length && other.prerelease.length)\n return 1;\n else if (!this.prerelease.length && !other.prerelease.length)\n return 0;\n\n var i = 0;\n do {\n var a = this.prerelease[i];\n var b = other.prerelease[i];\n debug('prerelease compare', i, a, b);\n if (a === undefined && b === undefined)\n return 0;\n else if (b === undefined)\n return 1;\n else if (a === undefined)\n return -1;\n else if (a === b)\n continue;\n else\n return compareIdentifiers(a, b);\n } while (++i);\n};\n\n// preminor will bump the version up to the next minor release, and immediately\n// down to pre-release. 
premajor and prepatch work the same way.\nSemVer.prototype.inc = function(release, identifier) {\n switch (release) {\n case 'premajor':\n this.prerelease.length = 0;\n this.patch = 0;\n this.minor = 0;\n this.major++;\n this.inc('pre', identifier);\n break;\n case 'preminor':\n this.prerelease.length = 0;\n this.patch = 0;\n this.minor++;\n this.inc('pre', identifier);\n break;\n case 'prepatch':\n // If this is already a prerelease, it will bump to the next version\n // drop any prereleases that might already exist, since they are not\n // relevant at this point.\n this.prerelease.length = 0;\n this.inc('patch', identifier);\n this.inc('pre', identifier);\n break;\n // If the input is a non-prerelease version, this acts the same as\n // prepatch.\n case 'prerelease':\n if (this.prerelease.length === 0)\n this.inc('patch', identifier);\n this.inc('pre', identifier);\n break;\n\n case 'major':\n // If this is a pre-major version, bump up to the same major version.\n // Otherwise increment major.\n // 1.0.0-5 bumps to 1.0.0\n // 1.1.0 bumps to 2.0.0\n if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0)\n this.major++;\n this.minor = 0;\n this.patch = 0;\n this.prerelease = [];\n break;\n case 'minor':\n // If this is a pre-minor version, bump up to the same minor version.\n // Otherwise increment minor.\n // 1.2.0-5 bumps to 1.2.0\n // 1.2.1 bumps to 1.3.0\n if (this.patch !== 0 || this.prerelease.length === 0)\n this.minor++;\n this.patch = 0;\n this.prerelease = [];\n break;\n case 'patch':\n // If this is not a pre-release version, it will increment the patch.\n // If it is a pre-release it will bump up to the same patch version.\n // 1.2.0-5 patches to 1.2.0\n // 1.2.0 patches to 1.2.1\n if (this.prerelease.length === 0)\n this.patch++;\n this.prerelease = [];\n break;\n // This probably shouldn't be used publicly.\n // 1.0.0 \"pre\" would become 1.0.0-0 which is the wrong direction.\n case 'pre':\n if (this.prerelease.length === 0)\n 
this.prerelease = [0];\n else {\n var i = this.prerelease.length;\n while (--i >= 0) {\n if (typeof this.prerelease[i] === 'number') {\n this.prerelease[i]++;\n i = -2;\n }\n }\n if (i === -1) // didn't increment anything\n this.prerelease.push(0);\n }\n if (identifier) {\n // 1.2.0-beta.1 bumps to 1.2.0-beta.2,\n // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0\n if (this.prerelease[0] === identifier) {\n if (isNaN(this.prerelease[1]))\n this.prerelease = [identifier, 0];\n } else\n this.prerelease = [identifier, 0];\n }\n break;\n\n default:\n throw new Error('invalid increment argument: ' + release);\n }\n this.format();\n return this;\n};\n\nexports.inc = inc;\nfunction inc(version, release, loose, identifier) {\n if (typeof(loose) === 'string') {\n identifier = loose;\n loose = undefined;\n }\n\n try {\n return new SemVer(version, loose).inc(release, identifier).version;\n } catch (er) {\n return null;\n }\n}\n\nexports.diff = diff;\nfunction diff(version1, version2) {\n if (eq(version1, version2)) {\n return null;\n } else {\n var v1 = parse(version1);\n var v2 = parse(version2);\n if (v1.prerelease.length || v2.prerelease.length) {\n for (var key in v1) {\n if (key === 'major' || key === 'minor' || key === 'patch') {\n if (v1[key] !== v2[key]) {\n return 'pre'+key;\n }\n }\n }\n return 'prerelease';\n }\n for (var key in v1) {\n if (key === 'major' || key === 'minor' || key === 'patch') {\n if (v1[key] !== v2[key]) {\n return key;\n }\n }\n }\n }\n}\n\nexports.compareIdentifiers = compareIdentifiers;\n\nvar numeric = /^[0-9]+$/;\nfunction compareIdentifiers(a, b) {\n var anum = numeric.test(a);\n var bnum = numeric.test(b);\n\n if (anum && bnum) {\n a = +a;\n b = +b;\n }\n\n return (anum && !bnum) ? -1 :\n (bnum && !anum) ? 1 :\n a < b ? -1 :\n a > b ? 
1 :\n 0;\n}\n\nexports.rcompareIdentifiers = rcompareIdentifiers;\nfunction rcompareIdentifiers(a, b) {\n return compareIdentifiers(b, a);\n}\n\nexports.major = major;\nfunction major(a, loose) {\n return new SemVer(a, loose).major;\n}\n\nexports.minor = minor;\nfunction minor(a, loose) {\n return new SemVer(a, loose).minor;\n}\n\nexports.patch = patch;\nfunction patch(a, loose) {\n return new SemVer(a, loose).patch;\n}\n\nexports.compare = compare;\nfunction compare(a, b, loose) {\n return new SemVer(a, loose).compare(b);\n}\n\nexports.compareLoose = compareLoose;\nfunction compareLoose(a, b) {\n return compare(a, b, true);\n}\n\nexports.rcompare = rcompare;\nfunction rcompare(a, b, loose) {\n return compare(b, a, loose);\n}\n\nexports.sort = sort;\nfunction sort(list, loose) {\n return list.sort(function(a, b) {\n return exports.compare(a, b, loose);\n });\n}\n\nexports.rsort = rsort;\nfunction rsort(list, loose) {\n return list.sort(function(a, b) {\n return exports.rcompare(a, b, loose);\n });\n}\n\nexports.gt = gt;\nfunction gt(a, b, loose) {\n return compare(a, b, loose) > 0;\n}\n\nexports.lt = lt;\nfunction lt(a, b, loose) {\n return compare(a, b, loose) < 0;\n}\n\nexports.eq = eq;\nfunction eq(a, b, loose) {\n return compare(a, b, loose) === 0;\n}\n\nexports.neq = neq;\nfunction neq(a, b, loose) {\n return compare(a, b, loose) !== 0;\n}\n\nexports.gte = gte;\nfunction gte(a, b, loose) {\n return compare(a, b, loose) >= 0;\n}\n\nexports.lte = lte;\nfunction lte(a, b, loose) {\n return compare(a, b, loose) <= 0;\n}\n\nexports.cmp = cmp;\nfunction cmp(a, op, b, loose) {\n var ret;\n switch (op) {\n case '===':\n if (typeof a === 'object') a = a.version;\n if (typeof b === 'object') b = b.version;\n ret = a === b;\n break;\n case '!==':\n if (typeof a === 'object') a = a.version;\n if (typeof b === 'object') b = b.version;\n ret = a !== b;\n break;\n case '': case '=': case '==': ret = eq(a, b, loose); break;\n case '!=': ret = neq(a, b, loose); break;\n case 
'>': ret = gt(a, b, loose); break;\n case '>=': ret = gte(a, b, loose); break;\n case '<': ret = lt(a, b, loose); break;\n case '<=': ret = lte(a, b, loose); break;\n default: throw new TypeError('Invalid operator: ' + op);\n }\n return ret;\n}\n\nexports.Comparator = Comparator;\nfunction Comparator(comp, loose) {\n if (comp instanceof Comparator) {\n if (comp.loose === loose)\n return comp;\n else\n comp = comp.value;\n }\n\n if (!(this instanceof Comparator))\n return new Comparator(comp, loose);\n\n debug('comparator', comp, loose);\n this.loose = loose;\n this.parse(comp);\n\n if (this.semver === ANY)\n this.value = '';\n else\n this.value = this.operator + this.semver.version;\n\n debug('comp', this);\n}\n\nvar ANY = {};\nComparator.prototype.parse = function(comp) {\n var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR];\n var m = comp.match(r);\n\n if (!m)\n throw new TypeError('Invalid comparator: ' + comp);\n\n this.operator = m[1];\n if (this.operator === '=')\n this.operator = '';\n\n // if it literally is just '>' or '' then allow anything.\n if (!m[2])\n this.semver = ANY;\n else\n this.semver = new SemVer(m[2], this.loose);\n};\n\nComparator.prototype.inspect = function() {\n return '';\n};\n\nComparator.prototype.toString = function() {\n return this.value;\n};\n\nComparator.prototype.test = function(version) {\n debug('Comparator.test', version, this.loose);\n\n if (this.semver === ANY)\n return true;\n\n if (typeof version === 'string')\n version = new SemVer(version, this.loose);\n\n return cmp(version, this.operator, this.semver, this.loose);\n};\n\n\nexports.Range = Range;\nfunction Range(range, loose) {\n if ((range instanceof Range) && range.loose === loose)\n return range;\n\n if (!(this instanceof Range))\n return new Range(range, loose);\n\n this.loose = loose;\n\n // First, split based on boolean or ||\n this.raw = range;\n this.set = range.split(/\\s*\\|\\|\\s*/).map(function(range) {\n return this.parseRange(range.trim());\n }, 
this).filter(function(c) {\n // throw out any that are not relevant for whatever reason\n return c.length;\n });\n\n if (!this.set.length) {\n throw new TypeError('Invalid SemVer Range: ' + range);\n }\n\n this.format();\n}\n\nRange.prototype.inspect = function() {\n return '';\n};\n\nRange.prototype.format = function() {\n this.range = this.set.map(function(comps) {\n return comps.join(' ').trim();\n }).join('||').trim();\n return this.range;\n};\n\nRange.prototype.toString = function() {\n return this.range;\n};\n\nRange.prototype.parseRange = function(range) {\n var loose = this.loose;\n range = range.trim();\n debug('range', range, loose);\n // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`\n var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE];\n range = range.replace(hr, hyphenReplace);\n debug('hyphen replace', range);\n // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`\n range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace);\n debug('comparator trim', range, re[COMPARATORTRIM]);\n\n // `~ 1.2.3` => `~1.2.3`\n range = range.replace(re[TILDETRIM], tildeTrimReplace);\n\n // `^ 1.2.3` => `^1.2.3`\n range = range.replace(re[CARETTRIM], caretTrimReplace);\n\n // normalize spaces\n range = range.split(/\\s+/).join(' ');\n\n // At this point, the range is completely trimmed and\n // ready to be split into comparators.\n\n var compRe = loose ? 
re[COMPARATORLOOSE] : re[COMPARATOR];\n var set = range.split(' ').map(function(comp) {\n return parseComparator(comp, loose);\n }).join(' ').split(/\\s+/);\n if (this.loose) {\n // in loose mode, throw out any that are not valid comparators\n set = set.filter(function(comp) {\n return !!comp.match(compRe);\n });\n }\n set = set.map(function(comp) {\n return new Comparator(comp, loose);\n });\n\n return set;\n};\n\n// Mostly just for testing and legacy API reasons\nexports.toComparators = toComparators;\nfunction toComparators(range, loose) {\n return new Range(range, loose).set.map(function(comp) {\n return comp.map(function(c) {\n return c.value;\n }).join(' ').trim().split(' ');\n });\n}\n\n// comprised of xranges, tildes, stars, and gtlt's at this point.\n// already replaced the hyphen ranges\n// turn into a set of JUST comparators.\nfunction parseComparator(comp, loose) {\n debug('comp', comp);\n comp = replaceCarets(comp, loose);\n debug('caret', comp);\n comp = replaceTildes(comp, loose);\n debug('tildes', comp);\n comp = replaceXRanges(comp, loose);\n debug('xrange', comp);\n comp = replaceStars(comp, loose);\n debug('stars', comp);\n return comp;\n}\n\nfunction isX(id) {\n return !id || id.toLowerCase() === 'x' || id === '*';\n}\n\n// ~, ~> --> * (any, kinda silly)\n// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0\n// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0\n// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0\n// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0\n// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0\nfunction replaceTildes(comp, loose) {\n return comp.trim().split(/\\s+/).map(function(comp) {\n return replaceTilde(comp, loose);\n }).join(' ');\n}\n\nfunction replaceTilde(comp, loose) {\n var r = loose ? 
re[TILDELOOSE] : re[TILDE];\n return comp.replace(r, function(_, M, m, p, pr) {\n debug('tilde', comp, _, M, m, p, pr);\n var ret;\n\n if (isX(M))\n ret = '';\n else if (isX(m))\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';\n else if (isX(p))\n // ~1.2 == >=1.2.0- <1.3.0-\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';\n else if (pr) {\n debug('replaceTilde pr', pr);\n if (pr.charAt(0) !== '-')\n pr = '-' + pr;\n ret = '>=' + M + '.' + m + '.' + p + pr +\n ' <' + M + '.' + (+m + 1) + '.0';\n } else\n // ~1.2.3 == >=1.2.3 <1.3.0\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0';\n\n debug('tilde return', ret);\n return ret;\n });\n}\n\n// ^ --> * (any, kinda silly)\n// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0\n// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0\n// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0\n// ^1.2.3 --> >=1.2.3 <2.0.0\n// ^1.2.0 --> >=1.2.0 <2.0.0\nfunction replaceCarets(comp, loose) {\n return comp.trim().split(/\\s+/).map(function(comp) {\n return replaceCaret(comp, loose);\n }).join(' ');\n}\n\nfunction replaceCaret(comp, loose) {\n debug('caret', comp, loose);\n var r = loose ? re[CARETLOOSE] : re[CARET];\n return comp.replace(r, function(_, M, m, p, pr) {\n debug('caret', comp, _, M, m, p, pr);\n var ret;\n\n if (isX(M))\n ret = '';\n else if (isX(m))\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';\n else if (isX(p)) {\n if (M === '0')\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';\n else\n ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0';\n } else if (pr) {\n debug('replaceCaret pr', pr);\n if (pr.charAt(0) !== '-')\n pr = '-' + pr;\n if (M === '0') {\n if (m === '0')\n ret = '>=' + M + '.' + m + '.' + p + pr +\n ' <' + M + '.' + m + '.' + (+p + 1);\n else\n ret = '>=' + M + '.' + m + '.' + p + pr +\n ' <' + M + '.' + (+m + 1) + '.0';\n } else\n ret = '>=' + M + '.' + m + '.' + p + pr +\n ' <' + (+M + 1) + '.0.0';\n } else {\n debug('no pr');\n if (M === '0') {\n if (m === '0')\n ret = '>=' + M + '.' 
+ m + '.' + p +\n ' <' + M + '.' + m + '.' + (+p + 1);\n else\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + M + '.' + (+m + 1) + '.0';\n } else\n ret = '>=' + M + '.' + m + '.' + p +\n ' <' + (+M + 1) + '.0.0';\n }\n\n debug('caret return', ret);\n return ret;\n });\n}\n\nfunction replaceXRanges(comp, loose) {\n debug('replaceXRanges', comp, loose);\n return comp.split(/\\s+/).map(function(comp) {\n return replaceXRange(comp, loose);\n }).join(' ');\n}\n\nfunction replaceXRange(comp, loose) {\n comp = comp.trim();\n var r = loose ? re[XRANGELOOSE] : re[XRANGE];\n return comp.replace(r, function(ret, gtlt, M, m, p, pr) {\n debug('xRange', comp, ret, gtlt, M, m, p, pr);\n var xM = isX(M);\n var xm = xM || isX(m);\n var xp = xm || isX(p);\n var anyX = xp;\n\n if (gtlt === '=' && anyX)\n gtlt = '';\n\n if (xM) {\n if (gtlt === '>' || gtlt === '<') {\n // nothing is allowed\n ret = '<0.0.0';\n } else {\n // nothing is forbidden\n ret = '*';\n }\n } else if (gtlt && anyX) {\n // replace X with 0\n if (xm)\n m = 0;\n if (xp)\n p = 0;\n\n if (gtlt === '>') {\n // >1 => >=2.0.0\n // >1.2 => >=1.3.0\n // >1.2.3 => >= 1.2.4\n gtlt = '>=';\n if (xm) {\n M = +M + 1;\n m = 0;\n p = 0;\n } else if (xp) {\n m = +m + 1;\n p = 0;\n }\n } else if (gtlt === '<=') {\n // <=0.7.x is actually <0.8.0, since any 0.7.x should\n // pass. Similarly, <=7.x is actually <8.0.0, etc.\n gtlt = '<'\n if (xm)\n M = +M + 1\n else\n m = +m + 1\n }\n\n ret = gtlt + M + '.' + m + '.' + p;\n } else if (xm) {\n ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';\n } else if (xp) {\n ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';\n }\n\n debug('xRange return', ret);\n\n return ret;\n });\n}\n\n// Because * is AND-ed with everything else in the comparator,\n// and '' means \"any version\", just remove the *s entirely.\nfunction replaceStars(comp, loose) {\n debug('replaceStars', comp, loose);\n // Looseness is ignored here. 
star is always as loose as it gets!\n return comp.trim().replace(re[STAR], '');\n}\n\n// This function is passed to string.replace(re[HYPHENRANGE])\n// M, m, patch, prerelease, build\n// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5\n// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do\n// 1.2 - 3.4 => >=1.2.0 <3.5.0\nfunction hyphenReplace($0,\n from, fM, fm, fp, fpr, fb,\n to, tM, tm, tp, tpr, tb) {\n\n if (isX(fM))\n from = '';\n else if (isX(fm))\n from = '>=' + fM + '.0.0';\n else if (isX(fp))\n from = '>=' + fM + '.' + fm + '.0';\n else\n from = '>=' + from;\n\n if (isX(tM))\n to = '';\n else if (isX(tm))\n to = '<' + (+tM + 1) + '.0.0';\n else if (isX(tp))\n to = '<' + tM + '.' + (+tm + 1) + '.0';\n else if (tpr)\n to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr;\n else\n to = '<=' + to;\n\n return (from + ' ' + to).trim();\n}\n\n\n// if ANY of the sets match ALL of its comparators, then pass\nRange.prototype.test = function(version) {\n if (!version)\n return false;\n\n if (typeof version === 'string')\n version = new SemVer(version, this.loose);\n\n for (var i = 0; i < this.set.length; i++) {\n if (testSet(this.set[i], version))\n return true;\n }\n return false;\n};\n\nfunction testSet(set, version) {\n for (var i = 0; i < set.length; i++) {\n if (!set[i].test(version))\n return false;\n }\n\n if (version.prerelease.length) {\n // Find the set of versions that are allowed to have prereleases\n // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0\n // That should allow `1.2.3-pr.2` to pass.\n // However, `1.2.4-alpha.notready` should NOT be allowed,\n // even though it's within the range set by the comparators.\n for (var i = 0; i < set.length; i++) {\n debug(set[i].semver);\n if (set[i].semver === ANY)\n return true;\n\n if (set[i].semver.prerelease.length > 0) {\n var allowed = set[i].semver;\n if (allowed.major === version.major &&\n allowed.minor === version.minor &&\n allowed.patch === version.patch)\n return true;\n }\n }\n\n // Version has a -pre, but it's 
not one of the ones we like.\n return false;\n }\n\n return true;\n}\n\nexports.satisfies = satisfies;\nfunction satisfies(version, range, loose) {\n try {\n range = new Range(range, loose);\n } catch (er) {\n return false;\n }\n return range.test(version);\n}\n\nexports.maxSatisfying = maxSatisfying;\nfunction maxSatisfying(versions, range, loose) {\n return versions.filter(function(version) {\n return satisfies(version, range, loose);\n }).sort(function(a, b) {\n return rcompare(a, b, loose);\n })[0] || null;\n}\n\nexports.validRange = validRange;\nfunction validRange(range, loose) {\n try {\n // Return '*' instead of '' so that truthiness works.\n // This will throw if it's invalid anyway\n return new Range(range, loose).range || '*';\n } catch (er) {\n return null;\n }\n}\n\n// Determine if version is less than all the versions possible in the range\nexports.ltr = ltr;\nfunction ltr(version, range, loose) {\n return outside(version, range, '<', loose);\n}\n\n// Determine if version is greater than all the versions possible in the range.\nexports.gtr = gtr;\nfunction gtr(version, range, loose) {\n return outside(version, range, '>', loose);\n}\n\nexports.outside = outside;\nfunction outside(version, range, hilo, loose) {\n version = new SemVer(version, loose);\n range = new Range(range, loose);\n\n var gtfn, ltefn, ltfn, comp, ecomp;\n switch (hilo) {\n case '>':\n gtfn = gt;\n ltefn = lte;\n ltfn = lt;\n comp = '>';\n ecomp = '>=';\n break;\n case '<':\n gtfn = lt;\n ltefn = gte;\n ltfn = gt;\n comp = '<';\n ecomp = '<=';\n break;\n default:\n throw new TypeError('Must provide a hilo val of \"<\" or \">\"');\n }\n\n // If it satisifes the range it is not outside\n if (satisfies(version, range, loose)) {\n return false;\n }\n\n // From now on, variable terms are as if we're in \"gtr\" mode.\n // but note that everything is flipped for the \"ltr\" function.\n\n for (var i = 0; i < range.set.length; ++i) {\n var comparators = range.set[i];\n\n var high = null;\n 
var low = null;\n\n comparators.forEach(function(comparator) {\n high = high || comparator;\n low = low || comparator;\n if (gtfn(comparator.semver, high.semver, loose)) {\n high = comparator;\n } else if (ltfn(comparator.semver, low.semver, loose)) {\n low = comparator;\n }\n });\n\n // If the edge version comparator has a operator then our version\n // isn't outside it\n if (high.operator === comp || high.operator === ecomp) {\n return false;\n }\n\n // If the lowest version comparator has an operator and our version\n // is less than it then it isn't higher than the range\n if ((!low.operator || low.operator === comp) &&\n ltefn(version, low.semver)) {\n return false;\n } else if (low.operator === ecomp && ltfn(version, low.semver)) {\n return false;\n }\n }\n return true;\n}\n\n// Use the define() function if we're in AMD land\nif (typeof define === 'function' && define.amd)\n define(exports);\n","valid":"var semver = require(\"semver\")\nexports.name = validName\nexports.package = validPackage\nfunction validName(name) {\n if (!name) return false\n var n = name.replace(/^\\s+|\\s+$/g, '')\n if (!n || n.charAt(0) === \".\"\n || !n.match(/^[a-zA-Z0-9]/)\n || n.match(/[\\/\\(\\)&\\?#\\|<>@:%\\s\\\\\\*'\"!~`]/)\n || n.toLowerCase() === \"node_modules\"\n || n !== encodeURIComponent(n)\n || n.toLowerCase() === \"favicon.ico\") {\n return false\n }\n return n\n }\nfunction validPackage(pkg) {\n return validName(pkg.name) && semver.valid(pkg.version)\n }","Date":"exports.parse = parse\nexports.toISOString = toISOString\nexports.now = now\nfunction now() {\n return new Date().getTime()\n }\nfunction parse(s) {\n // s is something like \"2010-12-29T07:31:06Z\"\n s = s.split(\"T\")\n var ds = s[0]\n , ts = s[1]\n , d = new Date()\n ds = ds.split(\"-\")\n ts = ts.split(\":\")\n var tz = ts[2].substr(2)\n ts[2] = ts[2].substr(0, 2)\n d.setUTCFullYear(+ds[0])\n d.setUTCMonth(+ds[1]-1)\n d.setUTCDate(+ds[2])\n d.setUTCHours(+ts[0])\n d.setUTCMinutes(+ts[1])\n 
d.setUTCSeconds(+ts[2])\n d.setUTCMilliseconds(0)\n return d.getTime()\n }\nexports.toISOString = toISOString\nfunction toISOString() { return ISODateString(this) }\nfunction pad(n){return n<10 ? '0'+n : n}\nfunction ISODateString(d){\n return d.getUTCFullYear()+'-'\n + pad(d.getUTCMonth()+1)+'-'\n + pad(d.getUTCDate())+'T'\n + pad(d.getUTCHours())+':'\n + pad(d.getUTCMinutes())+':'\n + pad(d.getUTCSeconds())+'Z'}","Object":"exports.keys = keys\nfunction keys(o) {\n var a = []\n for (var i in o) a.push(i)\n return a }","Array":"exports.isArray = isArray\nexports.forEach = forEach\n\nexports.reduce = reduce\n\nfunction forEach(fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\nfunction reduce(callback, initialValue) {\n var previousValue = initialValue || this[0];\n for (var i = initialValue ? 0 : 1; i < this.length; i++) {\n previousValue = callback(previousValue, this[i], i, this);\n }\n return previousValue;\n }\nfunction isArray(a) {\n return a instanceof Array\n || Object.prototype.toString.call(a) === \"[object Array]\"\n || (typeof a === \"object\" && typeof a.length === \"number\") }","String":"exports.trim = trim\nfunction trim() {\n return this.replace(/^\\s+|\\s+$/g, \"\")\n }","monkeypatch":"exports.patch = patch\nfunction patch(Object, Date, Array, String) {\n if (!Date.parse || isNaN(Date.parse(\"2010-12-29T07:31:06Z\"))) {\n Date.parse = require(\"Date\").parse\n }\n\n Date.prototype.toISOString = Date.prototype.toISOString\n || require(\"Date\").toISOString\n\n Date.now = Date.now\n || require(\"Date\").now\n\n Object.keys = Object.keys\n || require(\"Object\").keys\n\n Array.prototype.forEach = Array.prototype.forEach\n || require(\"Array\").forEach\n\n Array.prototype.reduce = Array.prototype.reduce\n || require(\"Array\").reduce\n\n Array.isArray = Array.isArray\n || require(\"Array\").isArray\n\n String.prototype.trim = String.prototype.trim\n || require(\"String\").trim\n 
}","attachments_md5":{},"deploy_version":"v2.6.7","scope":"exports.parse = parse\nexports.isScoped = isScoped\nexports.isGlobal = isGlobal\nfunction parse(name) {\n var m = name.match(/^(?:@([^\\/]+)\\/)?([^\\/]+)$/)\n return m ? [m[1], m[2]] : []\n }\nfunction isScoped(name) {\n return !!(parse(name)[0])\n }\nfunction isGlobal(name) {\n return !isScoped(name)\n }"}