author    | Ralph Amissah <ralph.amissah@gmail.com> | 2021-10-23 19:49:43 -0400
committer | Ralph Amissah <ralph.amissah@gmail.com> | 2021-11-27 19:51:25 -0500
commit    | c4f6c86999ec8fe3b610ac269a121c9fa424daf5 (patch)
tree      | 9251611c030c29af99bfa59bda94521c3500ad94 /sundry/spine_search_cgi
parent    | nix .envrc related (diff)
dlang dub & nix build related, includes dep update
- nix-shell --pure
- nix-build
- dub build --compiler=ldc2 --build=release --force
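The `nix-shell --pure` step picks up the repository's shell.nix, which this commit rewrites to the plain `with pkgs; mkShell` form (see the shell.nix hunk in the diff below). A minimal sketch of that pattern, trimmed here to the D toolchain and sqlite packages the CGI build actually uses; the buildInputs list in the repository is longer:

```nix
#!/usr/bin/env -S nix-shell --pure
{ pkgs ? import <nixpkgs> {} }:
with pkgs;
mkShell {
  buildInputs = [
    # D toolchain driven by `dub build --compiler=ldc2`
    dub
    ldc
    # search backend for the spine_search CGI
    sqlite
  ];
}
```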
Diffstat (limited to 'sundry/spine_search_cgi')
8 files changed, 382 insertions, 182 deletions
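On the Nix side, the first hunks below reduce the old self-contained default.nix to a thin wrapper and move its packaging logic into a new derivation.nix, instantiated through `callPackage`. A sketch of the resulting entry point, matching the shape of the new default.nix:

```nix
#!/usr/bin/env -S nix-build
{ pkgs ? import <nixpkgs> {} }:
# the packaging logic now lives in derivation.nix;
# callPackage fills in its arguments (stdenv, lib, dub, ...) from pkgs
pkgs.callPackage ./derivation.nix {}
```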
diff --git a/sundry/spine_search_cgi/default.nix b/sundry/spine_search_cgi/default.nix index f9cc984..72f4f02 100755 --- a/sundry/spine_search_cgi/default.nix +++ b/sundry/spine_search_cgi/default.nix @@ -1,114 +1,3 @@ #!/usr/bin/env -S nix-build -{ pkgs ? import <nixpkgs> {}, - stdenv ? pkgs.stdenv, - lib ? pkgs.lib, - ldc ? null, - dcompiler ? pkgs.ldc, - dub ? pkgs.dub -}: -assert dcompiler != null; -with ( - assert dcompiler != null; - with lib; - let - # Filter function to remove the .dub package folder from src - filterDub = name: type: let baseName = baseNameOf (toString name); in ! ( - type == "directory" && baseName == ".dub" - ); - targetOf = package: "${package.targetPath or "."}/${package.targetName or package.name}"; - # Remove reference to build tools and library sources - disallowedReferences = deps: [ dcompiler dub ]; - removeExpr = refs: ''remove-references-to ${lib.concatMapStrings (ref: " -t ${ref}") refs}''; - in { - mkDubDerivation = lib.makeOverridable ({ - src, - nativeBuildInputs ? [], - dubJSON ? src + "/dub.json", - passthru ? {}, - package ? lib.importJSON dubJSON, - ... - } @ attrs: stdenv.mkDerivation (attrs // { - pname = package.name; - nativeBuildInputs = [ dcompiler dub pkgs.removeReferencesTo ] ++ nativeBuildInputs; - disallowedReferences = disallowedReferences deps; - passthru = passthru // { - inherit dub dcompiler pkgs; - }; - src = lib.cleanSourceWith { - filter = filterDub; - src = lib.cleanSource src; - }; - preFixup = '' - find $out/share/cgi-bin -type f -exec ${removeExpr (disallowedReferences deps)} '{}' + || true - ''; - buildPhase = '' - runHook preBuild - export HOME=$PWD - for dc_ in dmd ldmd2 gdmd; do - echo "- check for D compiler $dc_" - dc=$(type -P $dc_ || echo "") - if [ ! "$dc" == "" ]; then - break - fi - done - if [ "$dc" == "" ]; then - exit "Error: could not find D compiler" - fi - echo "$dc_ used as D compiler to build $pname" - dub build --compiler=$dc --build=release --combined --skip-registry=all - runHook postBuild - ''; - checkPhase = '' - runHook preCheck - export HOME=$PWD - dub test --combined --skip-registry=all - runHook postCheck - ''; - installPhase = '' - runHook preInstall - mkdir -p $out/share/cgi-bin - cp -r "${targetOf package}" $out/share/cgi-bin - install -m755 -D $out/share/cgi-bin/spine_search spine_search - runHook postInstall - ''; - postInstall = '' - echo "HERE ${targetOf package} $out/share/cgi-bin" - echo `ls -la $out/share/cgi-bin/spine_search` - ''; - meta = lib.optionalAttrs (package ? description) { - description = package.description; - } // attrs.meta or {}; - } // lib.optionalAttrs (!(attrs ? version)) { - # Use name from dub.json, unless pname and version are specified - name = package.name; - })); - } -); -mkDubDerivation rec { - name = "spine-search-${version}"; - version = "0.11.3"; - src = ./.; - buildInputs = [ - pkgs.sqlite ( - with pkgs; [ - nixFlakes - rund - dub - ldc - sqlite - ] - ) - ]; - # # buildPhase = [ ]; - # installPhase = '' - # install -m755 -D spine_search $out/bin/spine-search - # echo "built $out/bin/spine-search" - # ''; - meta = with pkgs.lib; { - homepage = https://sisudoc.org; - description = "a sisu like document parser"; - license = licenses.agpl3Plus; - platforms = platforms.linux; - maintainers = [ RalphAmissah ]; - }; -} +{ pkgs ? 
import <nixpkgs> {} }: +pkgs.callPackage ./derivation.nix {} diff --git a/sundry/spine_search_cgi/derivation.nix b/sundry/spine_search_cgi/derivation.nix new file mode 100644 index 0000000..5bde866 --- /dev/null +++ b/sundry/spine_search_cgi/derivation.nix @@ -0,0 +1,108 @@ +{ pkgs ? import <nixpkgs> {}, + stdenv ? pkgs.stdenv, + lib ? pkgs.lib, + ldc ? null, + dcompiler ? pkgs.ldc, + dub ? pkgs.dub +}: +assert dcompiler != null; +with ( + assert dcompiler != null; + with lib; + let + # Filter function to remove the .dub package folder from src + filterDub = name: type: let baseName = baseNameOf (toString name); in ! ( + type == "directory" && baseName == ".dub" + ); + targetOf = package: "${package.targetPath or "."}/${package.targetName or package.name}"; + # Remove reference to build tools and library sources + disallowedReferences = deps: [ dcompiler dub ]; + removeExpr = refs: ''remove-references-to ${lib.concatMapStrings (ref: " -t ${ref}") refs}''; + in { + mkDubDerivation = lib.makeOverridable ({ + src, + nativeBuildInputs ? [], + dubJSON ? src + "/dub.json", + passthru ? {}, + package ? lib.importJSON dubJSON, + ... + } @ attrs: stdenv.mkDerivation (attrs // { + pname = package.name; + nativeBuildInputs = [ dcompiler dub pkgs.removeReferencesTo ] ++ nativeBuildInputs; + disallowedReferences = disallowedReferences deps; + passthru = passthru // { + inherit dub dcompiler pkgs; + }; + src = lib.cleanSourceWith { + filter = filterDub; + src = lib.cleanSource src; + }; + preFixup = '' + find $out/share/cgi-bin -type f -exec ${removeExpr (disallowedReferences deps)} '{}' + || true + ''; + buildPhase = '' + runHook preBuild + export HOME=$PWD + for dc_ in dmd ldmd2 gdmd; do + echo "- check for D compiler $dc_" + dc=$(type -P $dc_ || echo "") + if [ ! "$dc" == "" ]; then + break + fi + done + if [ "$dc" == "" ]; then + exit "Error: could not find D compiler" + fi + echo "$dc_ used as D compiler to build $pname" + dub build --compiler=$dc --build=release --combined --skip-registry=all + runHook postBuild + ''; + checkPhase = '' + runHook preCheck + export HOME=$PWD + dub test --combined --skip-registry=all + runHook postCheck + ''; + installPhase = '' + runHook preInstall + mkdir -p $out/share/cgi-bin + cp -r "${targetOf package}" $out/share/cgi-bin + install -m755 -D $out/share/cgi-bin/spine_search spine_search + runHook postInstall + ''; + postInstall = '' + echo "HERE ${targetOf package} $out/share/cgi-bin" + echo `ls -la $out/share/cgi-bin/spine_search` + ''; + meta = lib.optionalAttrs (package ? description) { + description = package.description; + } // attrs.meta or {}; + } // lib.optionalAttrs (!(attrs ? 
version)) { + # Use name from dub.json, unless pname and version are specified + name = package.name; + })); + } +); +mkDubDerivation rec { + name = "spine-search-${version}"; + version = "0.11.3"; + src = ./.; + buildInputs = [ + pkgs.sqlite ( + with pkgs; [ + nixFlakes + rund + dub + ldc + sqlite + ] + ) + ]; + meta = with pkgs.lib; { + homepage = https://sisudoc.org; + description = "a sisu like document parser"; + license = licenses.agpl3Plus; + platforms = platforms.linux; + maintainers = [ RalphAmissah ]; + }; +} diff --git a/sundry/spine_search_cgi/dub.json b/sundry/spine_search_cgi/dub.json index 12e77e5..6e17da3 100644 --- a/sundry/spine_search_cgi/dub.json +++ b/sundry/spine_search_cgi/dub.json @@ -38,7 +38,8 @@ "authors": [ "Adam Ruppee" ], "copyright": "Copyright 2011-18 Adam Ruppee", "license": "BSL-1.0", - "sourcePaths": [ "./src/ext_depends_cgi/arsd" ], + "sourcePaths": [ "./src/ext_depends_cgi/arsd" ], + "importPaths": [ "./src/ext_depends_cgi/arsd" ], "configurations": [ { "name": "cgi", @@ -55,7 +56,8 @@ "authors": [ "Nicolas Sicard", "Other contributors: see Github repo" ], "copyright": "Copyright 2011-18 Nicolas Sicard", "license": "BSL-1.0", - "sourcePaths": [ "./src/ext_depends_cgi/d2sqlite3/source" ], + "sourcePaths": [ "./src/ext_depends_cgi/d2sqlite3/source" ], + "importPaths": [ "./src/ext_depends_cgi/d2sqlite3/source" ], "configurations": [ { "name": "d2sqlite3", diff --git a/sundry/spine_search_cgi/shell.nix b/sundry/spine_search_cgi/shell.nix index 55ca4ae..8a73316 100755 --- a/sundry/spine_search_cgi/shell.nix +++ b/sundry/spine_search_cgi/shell.nix @@ -1,7 +1,8 @@ #!/usr/bin/env -S nix-shell --pure { pkgs ? import <nixpkgs> {} }: -pkgs.mkShell { - buildInputs = with pkgs; [ +with pkgs; +mkShell { + buildInputs = [ # nix_related nixFlakes nix-prefetch-git @@ -13,7 +14,6 @@ pkgs.mkShell { dub ldc #meson - # search_sqlite_related # search related sqlite # candy diff --git a/sundry/spine_search_cgi/src/ext_depends_cgi/arsd/cgi.d b/sundry/spine_search_cgi/src/ext_depends_cgi/arsd/cgi.d index a0249ee..9ac46b9 100644 --- a/sundry/spine_search_cgi/src/ext_depends_cgi/arsd/cgi.d +++ b/sundry/spine_search_cgi/src/ext_depends_cgi/arsd/cgi.d @@ -107,6 +107,7 @@ void main() { * `cgi` for traditional cgi binaries. * `fastcgi` for FastCGI builds. * `scgi` for SCGI builds. + * `stdio_http` for speaking raw http over stdin and stdout. See [RequestServer.serveSingleHttpConnectionOnStdio] for more information. ) With dmd, use: @@ -131,6 +132,8 @@ void main() { - The embedded HTTP server will use a prefork style process pool. (use instead of plain `embedded_httpd` if you want this specific implementation) * - `-version=embedded_httpd_processes_accept_after_fork` - It will call accept() in each child process, after forking. This is currently the only option, though I am experimenting with other ideas. You probably should NOT specify this right now. + * - `-version=stdio_http` + - The embedded HTTP server will be spoken over stdin and stdout. * + Tweaks + (can be used together with others) @@ -147,7 +150,7 @@ void main() { ) Compile_and_run: - + For CGI, `dmd yourfile.d cgi.d` then put the executable in your cgi-bin directory. For FastCGI: `dmd yourfile.d cgi.d -version=fastcgi` and run it. spawn-fcgi helps on nginx. You can put the file in the directory for Apache. On IIS, run it with a port on the command line (this causes it to call FCGX_OpenSocket, which can work on nginx too). @@ -325,7 +328,7 @@ void main() { web applications. For working with json, try [arsd.jsvar]. 
- + [arsd.database], [arsd.mysql], [arsd.postgres], [arsd.mssql], and [arsd.sqlite] can help in accessing databases. @@ -529,7 +532,7 @@ class ConnectionClosedException : Exception { } } - + version(Windows) { // FIXME: ugly hack to solve stdin exception problems on Windows: // reading stdin results in StdioException (Bad file descriptor) @@ -543,7 +546,7 @@ private struct stdin { in { assert(size, "size must be larger than 0"); } - body { + do { chunk_ = new ubyte[](size); popFront(); } @@ -592,6 +595,10 @@ static: } auto byChunk(size_t sz) { return ByChunk(sz); } + + void close() { + std.stdio.stdin.close; + } } } @@ -1338,7 +1345,7 @@ class Cgi { } /* - stderr.writeln("RECEIVED: ", pps.piece.name, "=", + stderr.writeln("RECEIVED: ", pps.piece.name, "=", pps.piece.content.length < 1000 ? to!string(pps.piece.content) @@ -1605,7 +1612,7 @@ class Cgi { /// My idea here was so you can output a progress bar or /// something to a cooperative client (see arsd.rtud for a potential helper) /// - /// The default is to do nothing. Subclass cgi and use the + /// The default is to do nothing. Subclass cgi and use the /// CustomCgiMain mixin to do something here. void onRequestBodyDataReceived(size_t receivedSoFar, size_t totalExpected) const { // This space intentionally left blank. @@ -1638,9 +1645,9 @@ class Cgi { // see: https://github.com/dlang/phobos/pull/7383 // but this might be more useful anyway tbh for this case version(Posix) - this(ir, cast(UnixAddress) ira ? "unix:" : ira.toString(), port, 0, false, &rdo, null, closeConnection); + this(ir, ira is null ? null : cast(UnixAddress) ira ? "unix:" : ira.toString(), port, 0, false, &rdo, null, closeConnection); else - this(ir, ira.toString(), port, 0, false, &rdo, null, closeConnection); + this(ir, ira is null ? null : ira.toString(), port, 0, false, &rdo, null, closeConnection); } /** @@ -1957,8 +1964,8 @@ class Cgi { /// application. Either use Apache's built in methods for basic authentication, or add /// something along these lines to your server configuration: /// - /// RewriteEngine On - /// RewriteCond %{HTTP:Authorization} ^(.*) + /// RewriteEngine On + /// RewriteCond %{HTTP:Authorization} ^(.*) /// RewriteRule ^(.*) - [E=HTTP_AUTHORIZATION:%1] /// /// To ensure the necessary data is available to cgi.d. @@ -2413,7 +2420,7 @@ class Cgi { /++ Gets a request variable as a specific type, or the default value of it isn't there or isn't convertible to the request type. - + Checks both GET and POST variables, preferring the POST variable, if available. A nice trick is using the default value to choose the type: @@ -2548,7 +2555,7 @@ class Cgi { immutable(char[]) referrer; immutable(char[]) requestUri; /// The full url if the current request, excluding the protocol and host. requestUri == scriptName ~ pathInfo ~ (queryString.length ? "?" ~ queryString : ""); - immutable(char[]) remoteAddress; /// The IP address of the user, as we see it. (Might not match the IP of the user's computer due to things like proxies and NAT.) + immutable(char[]) remoteAddress; /// The IP address of the user, as we see it. (Might not match the IP of the user's computer due to things like proxies and NAT.) immutable bool https; /// Was the request encrypted via https? immutable int port; /// On what TCP port number did the server receive the request? @@ -2561,7 +2568,7 @@ class Cgi { /** Represents user uploaded files. 
- + When making a file upload form, be sure to follow the standard: set method="POST" and enctype="multipart/form-data" in your html <form> tag attributes. The key into this array is the name attribute on your input tag, just like with other post variables. See the comments on the UploadedFile struct for more information about the data inside, including important notes on max size and content location. */ immutable(UploadedFile[][string]) filesArray; @@ -2886,7 +2893,7 @@ struct Uri { path_loop: auto path_start = idx; - + foreach(char c; uri[idx .. $]) { if(c == '?' || c == '#') break; @@ -2991,14 +2998,23 @@ struct Uri { if(part == ".") { continue; } else if(part == "..") { - toKeep = toKeep[0 .. $-1]; + //if(toKeep.length > 1) + toKeep = toKeep[0 .. $-1]; + //else + //toKeep = [""]; continue; } else { + //if(toKeep.length && toKeep[$-1].length == 0 && part.length == 0) + //continue; // skip a `//` situation toKeep ~= part; } } - this.path = toKeep.join("/"); + auto path = toKeep.join("/"); + if(path.length && path[0] != '/') + path = "/" ~ path; + + this.path = path; } unittest { @@ -3081,6 +3097,9 @@ struct Uri { assert(Uri("./").basedOn(url) == "/test/", Uri("./").basedOn(url)); assert(Uri("../").basedOn(url) == "/"); + url = Uri("http://example.com/"); + assert(Uri("../foo").basedOn(url) == "http://example.com/foo"); + //auto uriBefore = url; url = Uri("#anchor"); // everything should remain the same except the anchor //uriBefore.anchor = "anchor"); @@ -3258,6 +3277,37 @@ mixin template GenericMain(alias fun, long maxContentLength = defaultMaxContentL mixin CustomCgiMain!(Cgi, fun, maxContentLength); } +/++ + Boilerplate mixin for a main function that uses the [dispatcher] function. + + You can send `typeof(null)` as the `Presenter` argument to use a generic one. + + History: + Added July 9, 2021 ++/ +mixin template DispatcherMain(Presenter, DispatcherArgs...) { + /++ + Handler to the generated presenter you can use from your objects, etc. + +/ + Presenter activePresenter; + + /++ + Request handler that creates the presenter then forwards to the [dispatcher] function. + Renders 404 if the dispatcher did not handle the request. + +/ + void handler(Cgi cgi) { + auto presenter = new Presenter; + activePresenter = presenter; + scope(exit) activePresenter = null; + + if(cgi.dispatcher!DispatcherArgs(presenter)) + return; + + presenter.renderBasicError(cgi, 404); + } + mixin GenericMain!handler; +} + private string simpleHtmlEncode(string s) { return s.replace("&", "&").replace("<", "<").replace(">", ">").replace("\n", "<br />\n"); } @@ -3508,6 +3558,9 @@ struct RequestServer { } else version(fastcgi) { serveFastCgi!(fun, CustomCgi, maxContentLength)(this); + } else + version(stdio_http) { + serveSingleHttpConnectionOnStdio!(fun, CustomCgi, maxContentLength)(); } else { //version=plain_cgi; handleCgiRequest!(fun, CustomCgi, maxContentLength)(); @@ -3523,6 +3576,18 @@ struct RequestServer { manager.listen(); } + /++ + Serves a single "connection", but the connection is spoken on stdin and stdout instead of on a socket. 
+ + Intended for cases like working from systemd, like discussed here: https://forum.dlang.org/post/avmkfdiitirnrenzljwc@forum.dlang.org + + History: + Added May 29, 2021 + +/ + void serveSingleHttpConnectionOnStdio(alias fun, CustomCgi = Cgi, long maxContentLength = defaultMaxContentLength)() { + doThreadHttpConnectionGuts!(CustomCgi, fun, true)(new FakeSocketForStdin()); + } + void stop() { // FIXME } @@ -4015,7 +4080,7 @@ void handleCgiRequest(alias fun, CustomCgi = Cgi, long maxContentLength = defaul specify if you yield all bets are off. when the request is finished, if there's more data buffered, it just - keeps going. if there is no more data buffered, it epoll ctls to + keeps going. if there is no more data buffered, it epoll ctls to get triggered when more data comes in. all one shot. when a connection is closed, the fiber returns and is then reset @@ -4034,6 +4099,17 @@ void handleCgiRequest(alias fun, CustomCgi = Cgi, long maxContentLength = defaul +/ +/++ + The stack size when a fiber is created. You can set this from your main or from a shared static constructor + to optimize your memory use if you know you don't need this much space. Be careful though, some functions use + more stack space than you realize and a recursive function (including ones like in dom.d) can easily grow fast! + + History: + Added July 10, 2021. Previously, it used the druntime default of 16 KB. ++/ +version(cgi_use_fiber) +__gshared size_t fiberStackSize = 4096 * 100; + version(cgi_use_fiber) class CgiFiber : Fiber { private void function(Socket) f_handler; @@ -4047,8 +4123,7 @@ class CgiFiber : Fiber { this(void delegate(Socket) handler) { this.handler = handler; - // FIXME: stack size - super(&run); + super(&run, fiberStackSize); } Socket connection; @@ -4576,7 +4651,7 @@ class BufferedInputRange { // gonna treat a timeout here as a close sourceClosed = true; return; - } + } } version(Posix) { import core.stdc.errno; @@ -4643,6 +4718,53 @@ class BufferedInputRange { bool sourceClosed; } +private class FakeSocketForStdin : Socket { + import std.stdio; + + this() { + + } + + private bool closed; + + override ptrdiff_t receive(void[] buffer, std.socket.SocketFlags) @trusted { + if(closed) + throw new Exception("Closed"); + return stdin.rawRead(buffer).length; + } + + override ptrdiff_t send(const void[] buffer, std.socket.SocketFlags) @trusted { + if(closed) + throw new Exception("Closed"); + stdout.rawWrite(buffer); + return buffer.length; + } + + override void close() @trusted { + (cast(void delegate() @nogc nothrow) &realClose)(); + } + + override void shutdown(SocketShutdown s) { + // FIXME + } + + override void setOption(SocketOptionLevel, SocketOption, void[]) {} + override void setOption(SocketOptionLevel, SocketOption, Duration) {} + + override @property @trusted Address remoteAddress() { return null; } + override @property @trusted Address localAddress() { return null; } + + void realClose() { + closed = true; + try { + stdin.close(); + stdout.close(); + } catch(Exception e) { + + } + } +} + import core.sync.semaphore; import core.atomic; @@ -4909,6 +5031,7 @@ void sendAll(Socket s, const(void)[] data, string file = __FILE__, size_t line = throw new ConnectionException(s, lastSocketError, file, line); } assert(amount > 0); + data = data[amount .. 
$]; } while(data.length); } @@ -5488,7 +5611,7 @@ version(cgi_with_websocket) { WebSocketFrame wss; wss.fin = true; wss.opcode = WebSocketOpcode.close; - wss.data = cast(ubyte[]) reason; + wss.data = cast(ubyte[]) reason.dup; wss.send(&llsend); readyState_ = CLOSING; @@ -5523,7 +5646,7 @@ version(cgi_with_websocket) { WebSocketFrame wss; wss.fin = true; wss.opcode = WebSocketOpcode.text; - wss.data = cast(ubyte[]) textData; + wss.data = cast(ubyte[]) textData.dup; wss.send(&llsend); } @@ -5535,7 +5658,7 @@ version(cgi_with_websocket) { WebSocketFrame wss; wss.fin = true; wss.opcode = WebSocketOpcode.binary; - wss.data = cast(ubyte[]) binaryData; + wss.data = cast(ubyte[]) binaryData.dup; wss.send(&llsend); } @@ -5773,7 +5896,7 @@ version(cgi_with_websocket) { WebSocketFrame msg; msg.fin = true; msg.opcode = opcode; - msg.data = cast(ubyte[]) data; + msg.data = cast(ubyte[]) data.dup; return msg; } @@ -5906,7 +6029,7 @@ version(cgi_with_websocket) { if(d.length < 8) return needsMoreData(); foreach(i; 0 .. 8) { - msg.realLength |= d[0] << ((7-i) * 8); + msg.realLength |= ulong(d[0]) << ((7-i) * 8); d = d[1 .. $]; } } else { @@ -6282,7 +6405,7 @@ https://docs.microsoft.com/en-us/windows/desktop/api/winsock2/nf-winsock2-wsaget You can customize your server by subclassing the appropriate server. Then, register your subclass at compile time with the [registerEventIoServer] template, or implement your own main function and call it yourself. - + $(TIP If you make your subclass a `final class`, there is a slight performance improvement.) +/ version(with_addon_servers_connections) @@ -6691,7 +6814,7 @@ interface Session(Data) : SessionObject { /++ An implementation of [Session] that works on real cgi connections utilizing the [BasicDataServer]. - + As opposed to a [MockSession] which is made for testing purposes. You will not construct one of these directly. See [Cgi.getSessionObject] instead. @@ -7281,9 +7404,9 @@ final class EventSourceServerImplementation : EventSourceServer, EventIoServer { foreach(url, connections; eventConnectionsByUrl) foreach(connection; connections) if(connection.needsChunking) - nonBlockingWrite(this, connection.fd, "2\r\n:\n"); + nonBlockingWrite(this, connection.fd, "2\r\n:\n\r\n"); else - nonBlockingWrite(this, connection.fd, ":\n"); + nonBlockingWrite(this, connection.fd, ":\n\r\n"); } void fileClosed(int fd) { @@ -7449,18 +7572,21 @@ final class EventSourceServerImplementation : EventSourceServer, EventIoServer { auto len = toHex(formattedMessage.length); buffer[4 .. 6] = "\r\n"[]; buffer[4 - len.length .. 4] = len[]; + buffer[6 + formattedMessage.length] = '\r'; + buffer[6 + formattedMessage.length + 1] = '\n'; - auto chunkedMessage = buffer[4 - len.length .. 6 + formattedMessage.length]; + auto chunkedMessage = buffer[4 - len.length .. 6 + formattedMessage.length +2]; // done // FIXME: send back requests when needed // FIXME: send a single ":\n" every 15 seconds to keep alive foreach(connection; connections) { - if(connection.needsChunking) + if(connection.needsChunking) { nonBlockingWrite(this, connection.fd, chunkedMessage); - else + } else { nonBlockingWrite(this, connection.fd, formattedMessage); + } } } } @@ -8025,8 +8151,34 @@ auto callFromCgi(alias method, T)(T dg, Cgi cgi) { *what = T.init; return true; } else { - // could be a child - if(name[paramName.length] == '.') { + // could be a child. 
gonna allow either obj.field OR obj[field] + + string afterName; + + if(name[paramName.length] == '[') { + int count = 1; + auto idx = paramName.length + 1; + while(idx < name.length && count > 0) { + if(name[idx] == '[') + count++; + else if(name[idx] == ']') { + count--; + if(count == 0) break; + } + idx++; + } + + if(idx == name.length) + return false; // malformed + + auto insideBrackets = name[paramName.length + 1 .. idx]; + afterName = name[idx + 1 .. $]; + + name = name[0 .. paramName.length]; + + paramName = insideBrackets; + + } else if(name[paramName.length] == '.') { paramName = name[paramName.length + 1 .. $]; name = paramName; int p = 0; @@ -8036,17 +8188,23 @@ auto callFromCgi(alias method, T)(T dg, Cgi cgi) { p++; } - // set the child member - switch(paramName) { - foreach(idx, memberName; __traits(allMembers, T)) - static if(__traits(compiles, __traits(getMember, T, memberName).offsetof)) { - // data member! - case memberName: - return setVariable(name, paramName, &(__traits(getMember, *what, memberName)), value); - } - default: - // ok, not a member + afterName = paramName[p .. $]; + paramName = paramName[0 .. p]; + } else { + return false; + } + + if(paramName.length) + // set the child member + switch(paramName) { + foreach(idx, memberName; __traits(allMembers, T)) + static if(__traits(compiles, __traits(getMember, T, memberName).offsetof)) { + // data member! + case memberName: + return setVariable(name ~ afterName, paramName, &(__traits(getMember, *what, memberName)), value); } + default: + // ok, not a member } } @@ -8458,13 +8616,13 @@ html", true, true); } /// Multiple responses deconstruct the algebraic type and forward to the appropriate handler at runtime - void presentSuccessfulReturn(T : MultipleResponses!Types, Types...)(Cgi cgi, T ret, typeof(null) meta, string format) { + void presentSuccessfulReturn(T : MultipleResponses!Types, Meta, Types...)(Cgi cgi, T ret, Meta meta, string format) { bool outputted = false; foreach(index, type; Types) { if(ret.contains == index) { assert(!outputted); outputted = true; - (cast(CRTP) this).presentSuccessfulReturnAsHtml(cgi, ret.payload[index], meta); + (cast(CRTP) this).presentSuccessfulReturn(cgi, ret.payload[index], meta, format); } } if(!outputted) @@ -8574,7 +8732,19 @@ html", true, true); auto div = Element.make("div"); div.addClass("form-field"); - static if(is(T == struct)) { + static if(is(T == Cgi.UploadedFile)) { + Element lbl; + if(displayName !is null) { + lbl = div.addChild("label"); + lbl.addChild("span", displayName, "label-text"); + lbl.appendText(" "); + } else { + lbl = div; + } + auto i = lbl.addChild("input", name); + i.attrs.name = name; + i.attrs.type = "file"; + } else static if(is(T == struct)) { if(displayName !is null) div.addChild("span", displayName, "label-text"); auto fieldset = div.addChild("fieldset"); @@ -9167,7 +9337,7 @@ private auto serveApiInternal(T)(string urlPrefix) { } else static if(__traits(isSame, AutomaticForm, attr)) { automaticForm = true; } - + /+ int zeroArgOverload = -1; int overloadCount = cast(int) __traits(getOverloads, T, methodName).length; @@ -9304,7 +9474,7 @@ private auto serveApiInternal(T)(string urlPrefix) { default: return false; } - + assert(0); } return DispatcherDefinition!internalHandler(urlPrefix, false); @@ -9948,7 +10118,7 @@ struct DUMMY {} struct SetOfFields(T) { private void[0][string] storage; void set(string what) { - //storage[what] = + //storage[what] = } void unset(string what) {} void setAll() {} @@ -10255,27 +10425,58 @@ private static string 
getHttpCodeText(int code) pure nothrow @nogc { case 203: return "203 Non-Authoritative Information"; case 204: return "204 No Content"; case 205: return "205 Reset Content"; + case 206: return "206 Partial Content"; // case 300: return "300 Multiple Choices"; case 301: return "301 Moved Permanently"; case 302: return "302 Found"; case 303: return "303 See Other"; + case 304: return "304 Not Modified"; + case 305: return "305 Use Proxy"; case 307: return "307 Temporary Redirect"; case 308: return "308 Permanent Redirect"; + // - // FIXME: add more common 400 ones cgi.d might return too case 400: return "400 Bad Request"; + case 401: return "401 Unauthorized"; + case 402: return "402 Payment Required"; case 403: return "403 Forbidden"; case 404: return "404 Not Found"; case 405: return "405 Method Not Allowed"; case 406: return "406 Not Acceptable"; + case 407: return "407 Proxy Authentication Required"; + case 408: return "408 Request Timeout"; case 409: return "409 Conflict"; case 410: return "410 Gone"; - // + case 411: return "411 Length Required"; + case 412: return "412 Precondition Failed"; + case 413: return "413 Payload Too Large"; + case 414: return "414 URI Too Long"; + case 415: return "415 Unsupported Media Type"; + case 416: return "416 Range Not Satisfiable"; + case 417: return "417 Expectation Failed"; + case 418: return "418 I'm a teapot"; + case 421: return "421 Misdirected Request"; + case 422: return "422 Unprocessable Entity (WebDAV)"; + case 423: return "423 Locked (WebDAV)"; + case 424: return "424 Failed Dependency (WebDAV)"; + case 425: return "425 Too Early"; + case 426: return "426 Upgrade Required"; + case 428: return "428 Precondition Required"; + case 431: return "431 Request Header Fields Too Large"; + case 451: return "451 Unavailable For Legal Reasons"; + case 500: return "500 Internal Server Error"; case 501: return "501 Not Implemented"; case 502: return "502 Bad Gateway"; case 503: return "503 Service Unavailable"; + case 504: return "504 Gateway Timeout"; + case 505: return "505 HTTP Version Not Supported"; + case 506: return "506 Variant Also Negotiates"; + case 507: return "507 Insufficient Storage (WebDAV)"; + case 508: return "508 Loop Detected (WebDAV)"; + case 510: return "510 Not Extended"; + case 511: return "511 Network Authentication Required"; // default: assert(0, "Unsupported http code"); } diff --git a/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/.github/workflows/main.yaml b/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/.github/workflows/main.yaml index 28eaebe..f915693 100644 --- a/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/.github/workflows/main.yaml +++ b/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/.github/workflows/main.yaml @@ -58,12 +58,12 @@ jobs: dub --version # Build and run the tests - - name: '[POSIX] Build & test Agora' + - name: '[POSIX] Build & test' if: runner.os != 'Windows' #continue-on-error: matrix.dc == 'ldc-master' || matrix.dc == 'dmd-master' run: dub test -c ci - - name: '[Windows] Build & test Agora' + - name: '[Windows] Build & test' if: runner.os == 'Windows' #continue-on-error: matrix.dc == 'ldc-master' || matrix.dc == 'dmd-master' shell: cmd diff --git a/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/source/d2sqlite3/database.d b/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/source/d2sqlite3/database.d index 7aebe63..93a6509 100644 --- a/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/source/d2sqlite3/database.d +++ 
b/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/source/d2sqlite3/database.d @@ -1184,7 +1184,7 @@ version (_UnlockNotify) /// Unlocks the handler, state is one of SQLITE_LOCKED or SQLITE_OK void emit(int res) nothrow in { assert(res == SQLITE_LOCKED || res == SQLITE_OK); } - body + do { try { @@ -1208,7 +1208,7 @@ version (_UnlockNotify) /// Result after wait is finished @property int result() const out (result) { assert(result == SQLITE_OK || result == SQLITE_LOCKED); } - body { return res; } + do { return res; } } } else @@ -1238,7 +1238,7 @@ version (_UnlockNotify) /// Constructor this(Duration max = 1000.msecs) in { assert(max > Duration.zero); } - body + do { maxDuration = max; } @@ -1271,7 +1271,7 @@ version (_UnlockNotify) /// Result after wait is finished @property int result() const out (result) { assert(result == SQLITE_OK || result == SQLITE_LOCKED); } - body + do { return res; } diff --git a/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/source/d2sqlite3/statement.d b/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/source/d2sqlite3/statement.d index 14fe855..8cf6a38 100644 --- a/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/source/d2sqlite3/statement.d +++ b/sundry/spine_search_cgi/src/ext_depends_cgi/d2sqlite3/source/d2sqlite3/statement.d @@ -162,7 +162,7 @@ public: { assert(index > 0 && index <= p.paramCount, "parameter index out of range"); } - body + do { assert(p.handle); @@ -239,7 +239,7 @@ public: { assert(name.length); } - body + do { assert(p.handle); auto index = sqlite3_bind_parameter_index(p.handle, name.toStringz); @@ -255,7 +255,7 @@ public: { assert(Args.length == this.parameterCount, "parameter count mismatch"); } - body + do { foreach (index, _; Args) bind(index + 1, args[index]); @@ -342,7 +342,7 @@ public: static if (__traits(compiles, obj.length)) assert(obj.length == this.parameterCount, "parameter count mismatch"); } - body + do { static if (__traits(compiles, { foreach (string k, ref v; obj) {} })) { @@ -377,7 +377,7 @@ public: { assert(index > 0 && index <= p.paramCount, "parameter index out of range"); } - body + do { assert(p.handle); return sqlite3_bind_parameter_name(p.handle, index).to!string; @@ -394,7 +394,7 @@ public: { assert(name.length); } - body + do { assert(p.handle); return sqlite3_bind_parameter_index(p.handle, name.toStringz); |
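Because derivation.nix keeps its mkDubDerivation helper wrapped in `lib.makeOverridable` and is now reached through `callPackage`, a caller can swap individual build inputs without editing the file. A hedged sketch of that usage, assuming the file layout introduced by this commit; the `dcompiler` argument and its `pkgs.ldc` default come from the function header of derivation.nix, while `pkgs.dmd` is only an illustrative substitute:

```nix
{ pkgs ? import <nixpkgs> {} }:
let
  # derivation.nix takes dcompiler (default pkgs.ldc) and dub as arguments
  spineSearch = pkgs.callPackage ./derivation.nix {};
in
  # override re-invokes the packaging function with a different D compiler
  spineSearch.override { dcompiler = pkgs.dmd; }
```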