ucb/source/ucp/webdav-curl/CurlSession.cxx   |   33 +++++++++++++++------------
 ucb/source/ucp/webdav-curl/webdavcontent.cxx |    9 ++++---
 2 files changed, 25 insertions(+), 17 deletions(-)

New commits:
commit 0dca261c9361dbebf097bd5591a5e33269698739
Author:     Michael Stahl <michael.st...@allotropia.de>
AuthorDate: Thu Nov 25 17:36:20 2021 +0100
Commit:     Michael Stahl <michael.st...@allotropia.de>
CommitDate: Fri Nov 26 18:02:20 2021 +0100

    ucb: webdav-curl: avoid pointless HEAD requests for DAV resources
    
    Content::getPropertyValues() has a funny conditional which was changed
    in commit a86bc34ddcff6b04bb9fdb8c960bbf2c49540da1 "Resolves: #i121922#
    Fix wrong condition".
    
    But the new condition also appears wrong: there is no need to make any
    further requests if all properties have already been retrieved, so the
    check for m_bDidGetOrHead could simply be omitted.
    
    There is a surprising result in the Remote Files dialog when connecting
    to a Sharepoint 16 server: all properties have been retrieved from the
    directory given as Root, but because of !m_bDidGetOrHead the branch is
    taken anyway.
    
    The HEAD request results in a "HTTP/1.1 302 Redirect" to
    $ROOT/Forms/AllItems.aspx, HEAD on this URL gives a "HTTP/1.1 200 OK"
    and then subsequently a PROPFIND on that very same URL results in
    "HTTP/1.1 404 NOT FOUND".
    
    Physics claims that a single observation should be sufficient to
    determine whether a URL exists or not, but Sharepoint is apparently
    able to maintain this quantum superposition indefinitely; hence the
    Remote Files dialog doesn't display anything, as the 404 causes an
    exception.
    
    Try to fix this by requiring that a property actually be missing before
    initiating the HEAD request for DAV resources.
    
    Change-Id: I1239762948b6abd1f8fc097edd4a16cb6b75ca7a
    Reviewed-on: https://gerrit.libreoffice.org/c/core/+/125826
    Tested-by: Jenkins
    Reviewed-by: Michael Stahl <michael.st...@allotropia.de>
    (cherry picked from commit 4bbaec0ef84cccae2003d7b0690164482e148797)
    Reviewed-on: https://gerrit.libreoffice.org/c/core/+/125845
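
A minimal sketch of the resulting decision logic, for illustration only (the
function name and parameter names are invented here; the actual change is the
condition in Content::getPropertyValues() shown in the diff below):

    // Should getPropertyValues() fall back to a GET/HEAD request?
    bool shouldTryHeadRequest(bool bHaveAllProps, bool bIsDavResource, bool bDidGetOrHead)
    {
        if (bHaveAllProps)
            return false; // everything already cached, no further request needed
        // non-DAV: uncacheable properties must be fetched regardless of a
        // previous GET/HEAD (i#121922); DAV: only if no GET/HEAD was done yet
        return !bIsDavResource || !bDidGetOrHead;
    }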

diff --git a/ucb/source/ucp/webdav-curl/webdavcontent.cxx b/ucb/source/ucp/webdav-curl/webdavcontent.cxx
index 8bf19a582693..1eb069a3480e 100644
--- a/ucb/source/ucp/webdav-curl/webdavcontent.cxx
+++ b/ucb/source/ucp/webdav-curl/webdavcontent.cxx
@@ -1452,9 +1452,12 @@ uno::Reference< sdbc::XRow > Content::getPropertyValues(
             // All properties obtained already?
             std::vector< OUString > aMissingProps;
             if ( !( xProps
-                    && xProps->containsAllNames(
-                        rProperties, aMissingProps ) )
-                 || !m_bDidGetOrHead )
+                    && xProps->containsAllNames(rProperties, aMissingProps))
+                // i#121922 for non-DAV, uncacheable properties must be fetched
+                // regardless of m_bDidGetOrHead.
+                // But SharePoint may do weird things on HEAD so for DAV
+                // only do this if required.
+                && (eType != DAV || !m_bDidGetOrHead))
             {
                 // Possibly the missing props can be obtained using a HEAD
                 // request.
commit ac787764de3806b75607634a5c5856f58ddce881
Author:     Michael Stahl <michael.st...@allotropia.de>
AuthorDate: Thu Nov 25 17:16:36 2021 +0100
Commit:     Michael Stahl <michael.st...@allotropia.de>
CommitDate: Fri Nov 26 18:02:06 2021 +0100

    ucb: webdav-curl: don't use chunked encoding for PROPFIND
    
    Sharepoint 16 responds to a PROPFIND request sent with
    "Transfer-Encoding: chunked" with "HTTP/1.1 200 OK" and this body:
    
      <HTML><DIV dir="ltr"><H2>Microsoft SharePoint Foundation Error.</H2>\n\n<P>\n<B>User:</B> please report details to this Web site's Webmaster.\n<P>\n\n<P>\n<B>Webmaster:</B> please see the server's application event log for more details.\n</P></DIV>
    
    Understandably, WebDAVResponseParser barfs on this:
    
    ucb/source/ucp/webdav-curl/webdavresponseparser.cxx:405: Parser destructed with existing content (!)
    ucb/source/ucp/webdav-curl/webdavresponseparser.cxx:942: WebDAV Parse error (!)
    
    Apparently, setting Content-Length instead works better.
    
    Change-Id: If3eae8c2f8b9f1e1bb4ed4fc67a79a682d7d0050
    Reviewed-on: https://gerrit.libreoffice.org/c/core/+/125825
    Tested-by: Jenkins
    Reviewed-by: Michael Stahl <michael.st...@allotropia.de>
    (cherry picked from commit 8bb183f133202ebb687e0b56cc5a2938354506f7)
    Reviewed-on: https://gerrit.libreoffice.org/c/core/+/125844
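
For illustration, a minimal standalone libcurl sketch of what this commit does
(see the diff below); this is not the CurlSession code itself, it assumes a
plain CURLOPT_UPLOAD plus read-callback upload and the helper name is
invented: buffer the request body up front so its exact size is known, then
pass that size via CURLOPT_INFILESIZE_LARGE so curl sends a Content-Length
header instead of "Transfer-Encoding: chunked".

    #include <curl/curl.h>
    #include <algorithm>
    #include <cstring>
    #include <string>

    struct ReadCtx { const char* p; size_t remaining; };

    static size_t readCb(char* buf, size_t size, size_t nitems, void* userdata)
    {
        auto* ctx = static_cast<ReadCtx*>(userdata);
        size_t const n = std::min(size * nitems, ctx->remaining);
        std::memcpy(buf, ctx->p, n);
        ctx->p += n;
        ctx->remaining -= n;
        return n; // returning 0 signals end of the request body
    }

    // hypothetical helper: send a PROPFIND whose body size is known up front
    void propfindWithContentLength(CURL* curl, std::string const& url, std::string const& body)
    {
        ReadCtx ctx{ body.data(), body.size() };
        curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
        curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L);
        curl_easy_setopt(curl, CURLOPT_CUSTOMREQUEST, "PROPFIND");
        curl_easy_setopt(curl, CURLOPT_READFUNCTION, readCb);
        curl_easy_setopt(curl, CURLOPT_READDATA, &ctx);
        // known size => curl advertises Content-Length instead of chunked encoding
        curl_easy_setopt(curl, CURLOPT_INFILESIZE_LARGE, static_cast<curl_off_t>(body.size()));
        curl_easy_perform(curl); // error handling omitted in this sketch
    }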

diff --git a/ucb/source/ucp/webdav-curl/CurlSession.cxx b/ucb/source/ucp/webdav-curl/CurlSession.cxx
index 35c9b4df1e44..679083603d9b 100644
--- a/ucb/source/ucp/webdav-curl/CurlSession.cxx
+++ b/ucb/source/ucp/webdav-curl/CurlSession.cxx
@@ -1428,13 +1428,7 @@ auto CurlProcessor::PropFind(
            || (::std::get<1>(*o_pRequestedProperties) != nullptr)
                   != (::std::get<2>(*o_pRequestedProperties) != nullptr));
 
-    // TODO: either set CURLOPT_INFILESIZE_LARGE or chunked?
-    ::std::unique_ptr<curl_slist, deleter_from_fn<curl_slist, curl_slist_free_all>> pList(
-        curl_slist_append(nullptr, "Transfer-Encoding: chunked"));
-    if (!pList)
-    {
-        throw uno::RuntimeException("curl_slist_append failed");
-    }
+    ::std::unique_ptr<curl_slist, deleter_from_fn<curl_slist, curl_slist_free_all>> pList;
     pList.reset(curl_slist_append(pList.release(), "Content-Type: application/xml"));
     if (!pList)
     {
@@ -1461,13 +1455,11 @@ auto CurlProcessor::PropFind(
         throw uno::RuntimeException("curl_slist_append failed");
     }
 
-    ::std::vector<CurlOption> const options{ { CURLOPT_CUSTOMREQUEST, "PROPFIND",
-                                               "CURLOPT_CUSTOMREQUEST" } };
-
-    uno::Reference<io::XInputStream> const xRequestInStream(io::Pipe::create(rSession.m_xContext));
-    uno::Reference<io::XOutputStream> const xRequestOutStream(xRequestInStream, uno::UNO_QUERY);
-    assert(xRequestInStream.is());
+    uno::Reference<io::XSequenceOutputStream> const xSeqOutStream(
+        io::SequenceOutputStream::create(rSession.m_xContext));
+    uno::Reference<io::XOutputStream> const xRequestOutStream(xSeqOutStream);
     assert(xRequestOutStream.is());
+
     uno::Reference<xml::sax::XWriter> const xWriter(xml::sax::Writer::create(rSession.m_xContext));
     xWriter->setOutputStream(xRequestOutStream);
     xWriter->startDocument();
@@ -1503,7 +1495,19 @@ auto CurlProcessor::PropFind(
     }
     xWriter->endElement("propfind");
     xWriter->endDocument();
-    xRequestOutStream->closeOutput();
+
+    uno::Reference<io::XInputStream> const xRequestInStream(
+        io::SequenceInputStream::createStreamFromSequence(rSession.m_xContext,
+                                                          xSeqOutStream->getWrittenBytes()));
+    assert(xRequestInStream.is());
+
+    curl_off_t const len(xSeqOutStream->getWrittenBytes().getLength());
+
+    ::std::vector<CurlOption> const options{
+        { CURLOPT_CUSTOMREQUEST, "PROPFIND", "CURLOPT_CUSTOMREQUEST" },
+        // note: Sharepoint cannot handle "Transfer-Encoding: chunked"
+        { CURLOPT_INFILESIZE_LARGE, len, nullptr, CurlOption::Type::CurlOffT }
+    };
 
     // stream for response
     uno::Reference<io::XInputStream> const xResponseInStream(io::Pipe::create(rSession.m_xContext));
@@ -1801,6 +1805,7 @@ auto CurlSession::PUT(OUString const& rURIReference,
 
     // lock m_Mutex after accessing global LockStore to avoid deadlock
 
+    // note: Nextcloud 20 cannot handle "Transfer-Encoding: chunked"
     ::std::vector<CurlOption> const options{ { CURLOPT_INFILESIZE_LARGE, len, nullptr,
                                                CurlOption::Type::CurlOffT } };
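
A note on the PropFind() change above: the request body used to be streamed
through an io::Pipe, so its total size was unknown when the transfer was
configured (hence the chunked encoding); it is now serialized into a
SequenceOutputStream first, so the exact byte count can be passed via
CURLOPT_INFILESIZE_LARGE. A grossly simplified plain-C++ analogue of that
pattern (no UNO, no SAX writer, property names not namespaced; the helper is
invented for illustration only):

    #include <sstream>
    #include <string>
    #include <vector>

    // Build the whole PROPFIND body in memory first; once buffered, its size
    // is known up front and can be advertised as Content-Length.
    std::string buildPropfindBody(std::vector<std::string> const& propNames)
    {
        std::ostringstream os;
        os << "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
              "<propfind xmlns=\"DAV:\"><prop>";
        for (auto const& name : propNames)
            os << '<' << name << "/>";
        os << "</prop></propfind>";
        return os.str(); // .size() is the length to pass to the transfer
    }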
 
