summaryrefslogtreecommitdiff
path: root/Runtime/Export/WWW.cpp
diff options
context:
space:
mode:
authorchai <chaifix@163.com>2019-08-14 22:50:43 +0800
committerchai <chaifix@163.com>2019-08-14 22:50:43 +0800
commit15740faf9fe9fe4be08965098bbf2947e096aeeb (patch)
treea730ec236656cc8cab5b13f088adfaed6bb218fb /Runtime/Export/WWW.cpp
+Unity Runtime codeHEADmaster
Diffstat (limited to 'Runtime/Export/WWW.cpp')
-rw-r--r--Runtime/Export/WWW.cpp1575
1 files changed, 1575 insertions, 0 deletions
diff --git a/Runtime/Export/WWW.cpp b/Runtime/Export/WWW.cpp
new file mode 100644
index 0000000..facc72b
--- /dev/null
+++ b/Runtime/Export/WWW.cpp
@@ -0,0 +1,1575 @@
+#include "UnityPrefix.h"
+#include "WWW.h"
+#include "Runtime/Utilities/LogAssert.h"
+#include "Runtime/Threads/Thread.h"
+#include "Configuration/UnityConfigureVersion.h"
+#include "PlatformDependent/CommonWebPlugin/UnityWebStream.h"
+#include "Runtime/Misc/ReproductionLog.h"
+#include "Runtime/Scripting/ScriptingUtility.h"
+#include "Runtime/Scripting/Backend/ScriptingInvocation.h"
+#include "Runtime/Misc/PlayerSettings.h"
+#include "Runtime/Utilities/PathNameUtility.h"
+#include "Runtime/Scripting/Backend/ScriptingMethodRegistry.h"
+#include "External/Curl/include/minimalcurl.h"
+
+#if UNITY_EDITOR
+#include "Editor/Src/EditorSettings.h"
+#include "Editor/Src/EditorUserBuildSettings.h"
+#endif
+
+#if UNITY_IPHONE
+#include <CFNetwork/CFNetwork.h>
+#include "PlatformDependent/iPhonePlayer/iPhoneWWW.h"
+#elif UNITY_ANDROID
+#include "PlatformDependent/AndroidPlayer/AndroidWWW.h"
+#elif UNITY_PS3
+#include "PlatformDependent/PS3Player/PS3WWW.h"
+#elif UNITY_FLASH
+#include "PlatformDependent/FlashSupport/cpp/WWWFlash.h"
+#elif UNITY_WINRT
+#include "PlatformDependent/MetroPlayer/WWWMetro.h"
+#elif UNITY_XENON
+#include "PlatformDependent/Xbox360/Source/XenonWWW.h"
+#elif UNITY_LINUX
+#include "PlatformDependent/Linux/WWW.h"
+#endif
+
+#if UNITY_WIN
+#include <malloc.h> // alloca
+#if !UNITY_WP8
+#include "Winhttp.h"
+#endif
+#include "PlatformDependent\Win\WinUnicode.h"
+#endif
+#ifdef __MWERKS__
+#include <alloca.h>
+#endif
+#if UNITY_OSX
+#include "PlatformDependent/OSX/HttpProxy.h"
+#endif
+
+#include <algorithm>
+
+#if ENABLE_WWW
+
+#if WWW_USE_CURL
+static void SetCurlOptProxy (CURL* curl, const std::string& proxyUtf8, std::string::size_type offset)
+{
+ std::string::size_type end = proxyUtf8.find(";", offset);
+ std::string proxyConverted = proxyUtf8.substr(offset, end-offset);
+ curl_easy_setopt(curl, CURLOPT_PROXY, proxyConverted.c_str());
+}
+
+void SetupCurlProxyServerBasedOnEnvironmentVariable(CURL* curl, const char* url)
+{
+ const char* proxy = getenv("UNITY_PROXYSERVER");
+ if (proxy)
+ {
+ printf_console("Setting up proxyserver from UNITY_PROXYSERVER environment variable. Setting to: %s\n",proxy);
+ curl_easy_setopt(curl, CURLOPT_PROXY, proxy);
+ }
+#if UNITY_OSX || UNITY_LINUX
+ else
+ {
+ string proxy;
+ string auth;
+ if (GetProxyForURL(url, proxy, auth))
+ {
+ curl_easy_setopt(curl, CURLOPT_PROXY, proxy.c_str());
+ if (!auth.empty())
+ curl_easy_setopt(curl, CURLOPT_PROXYUSERPWD, auth.c_str());
+ }
+ }
+#elif UNITY_WIN
+ else
+ {
+ WINHTTP_CURRENT_USER_IE_PROXY_CONFIG config;
+ if (WinHttpGetIEProxyConfigForCurrentUser(&config) && config.lpszProxy != NULL)
+ {
+ std::string proxyUtf8;
+ ConvertWideToUTF8String( config.lpszProxy, proxyUtf8 );
+ std::string::size_type start = proxyUtf8.find("http=");
+ if (start != string::npos)
+ {
+ // handle http proxy with http= prefix
+ SetCurlOptProxy(curl, proxyUtf8, start+5);
+ }
+ else
+ {
+ // case #534876
+ // proxy might be set, but without the http= prefix.
+ // Sufficient to check that no "=" characters are found in the string
+ if (proxyUtf8.find("=") == std::string::npos )
+ SetCurlOptProxy(curl, proxyUtf8, 0);
+ }
+ }
+ }
+#else
+#pragma message("WARNING: This platform has no system proxy support")
+#endif
+}
+
+#endif // WWW_USE_CURL
+
+/*
+ WWW cleanup procedure>
+
+ WWW c# has pointer to the www class. When the WWW class is garbage collected. It tells the WWW class to kill itself. (RequestDestroy)
+ RequestDestroy will
+ * When the download is complete, the WWW c++ class is deleted immediately.
+ * When the download thread is still running, set a flag abortDownload in buffer which causes download to stop asap
+
+When the thread is finished, we check the abortDownload and based on that delete the www class
+(because RequestDestroy was called and no one is interested in it anymore)
+
+Or just keep it around, for any WWW c# access.
+
+*/
+
// Canonical error messages reported through WWW::GetError().
// Kept as plain C strings because the curl backend copies them into a raw
// char error buffer (see WWWCurl::DoInit / WWWCurl::Cancel).
const char* kWWWErrCustomHeadersWithGET="Error when creating request. GET request with custom headers is not supported.";
const char* kWWWErrZeroPostData="Error when creating request. POST request with a zero-sized post buffer is not supported.";
const char* kWWWErrNULLPostDataWithPositiveLength="Internal error when creating request. Post data is NULL but length is larger than 0";
const char* kWWWErrCancelled="WWW request was cancelled";
const char* kWWWErrPostDataWithNonHTTPSchema="Error when creating request. Non HTTP schemas with post data are not supported.";
const char* kWWWErrHeadersWithNonHTTPSchema="Error when creating request. Non HTTP schemas with custom headers are not supported.";
+
+
+double CalculateEta (int downloadedBytes, int totalBytes, double startTime)
+{
+ double curTime = GetTimeSinceStartup();
+ // bytes left (total bytes can be smaller for streamed files, which are complete when the stream can begin)
+ int bytesLeft = std::max(totalBytes - downloadedBytes, 0);
+ double bytesPerSecond = downloadedBytes / std::max((curTime - startTime), 0.1);
+ double timeLeft = bytesLeft / bytesPerSecond;
+ return timeLeft;
+}
+
+const char* GetCachedWWWError(const WWW& www, std::string& err)
+{
+ if (!err.empty())
+ {
+ return err.c_str();
+ }
+
+ UnityWebStream* stream = www.GetUnityWebStream();
+ if( stream && stream->IsErrorFlagSet() )
+ {
+ // Constructs the error string without temporaries
+ err = stream->GetError ();
+ err += " URL: ";
+ err += www.GetUrl();
+ return err.c_str();
+ }
+
+ return 0;
+}
+
+
+WWW::~WWW ()
+{
+ if (m_UnityWebStream != NULL)
+ m_UnityWebStream->Release();
+
+ #if SUPPORT_REPRODUCE_LOG
+ CleanupWWW(this);
+ #endif
+}
+
#if SUPPORT_THREADS
// Records the requested thread priority and forwards it to the web stream's
// decompression thread when the stream already exists.
void WWW::SetThreadPriority (ThreadPriority p)
{
	m_ThreadPriority = p;
	if(m_UnityWebStream)
		m_UnityWebStream->SetDecompressionPriority(m_ThreadPriority);
}
#endif
+
// Base implementation: unrestricted access. Backends that enforce
// web-player sandboxing are expected to override this.
WWW::SecurityPolicy WWW::GetSecurityPolicy() const
{
	return kSecurityPolicyAllowAccess;
}

std::string WWW::GetResponseHeaders() //returning a copy instead of a reference for thread safety
{
	// JVD: std::string does not have atomic copy c-tor, so returning by value does not help to improve thread-safety
	return m_ResponseHeaders;
}
+
+bool WWW::SetErrorFromResponseHeaders ()
+{
+ std::string headers = GetResponseHeaders();
+
+ if ( !BeginsWith(headers, "HTTP") )
+ return false; // Not a valid response header
+
+ // only interested in first line.
+ headers = headers.substr(0, headers.find ("\n"));
+
+ // get result code
+ size_t resultCodePos = headers.find (' ');
+ if (resultCodePos != string::npos)
+ {
+ headers = headers.substr(resultCodePos+1);
+ int status = 0;
+ if (sscanf (headers.c_str(), "%d", &status))
+ {
+ if (status >= 400)
+ {
+ SetError (headers);
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
// Accessor for the decompression stream; NULL until FeedUnityWebStream has
// recognized the payload as a Unity web stream.
UnityWebStream* WWW::GetUnityWebStream() const
{
	return m_UnityWebStream;
}

// Whether this download is backed by the cache: prefer the stream's own
// flag once the stream exists, otherwise the flag requested at creation.
bool WWW::IsCached () const
{
	if (m_UnityWebStream)
		return m_UnityWebStream->IsCached();
	else
		return m_Cached;
}

bool WWW::IsDone() const
{
	/*
	There is an issue with how IsReadyToPlay is updated that causes small assetbundles to fail downloading.
	This causes AssetStore previews to fail in the Editor.
	TODO: Figure out why IsReadyToPlay is never set to true.
	*/
	// For web-stream payloads "done" means fully decompressed,
	// not merely downloaded.
	return m_UnityWebStream ? m_UnityWebStream->IsFinished(): IsDownloadingDone();
}
+
// Incrementally hands downloaded bytes to the Unity web stream decompressor.
// Called repeatedly as data arrives; 'isDone' marks the final chunk.
// Until the payload is identified, each call re-attempts to parse the header.
void WWW::FeedUnityWebStream(bool isDone)
{
	// Check if this actually is a UnityWebStream file
	if (!m_DidParseUnityWebStream)
	{
		UnityWebStreamHeader header;

		// Safely parse stream header
		int result;
		{
			WWW::AutoLock lock(*this);
			const UInt8* partialData = GetPartialData();
			result = ParseStreamHeader (header, partialData, partialData + GetPartialSize());
		}

		// Is Unity Web file
		// result codes as used here: 0 == recognized web stream, 2 == definitely
		// not one; other values presumably mean "need more data" and we retry
		// on the next chunk — TODO confirm against ParseStreamHeader.
		if (result == 0)
		{
			if(m_Cached)
				m_UnityWebStream = UNITY_NEW_AS_ROOT(UnityWebStream(GetUrl(), m_CacheVersion, m_RequestedCRC), kMemFile, "WebStream", GetUrl());
			else
				m_UnityWebStream = UNITY_NEW_AS_ROOT(UnityWebStream(NULL, 0, m_RequestedCRC), kMemFile, "WebStream", GetUrl());
#if SUPPORT_THREADS
			m_UnityWebStream->SetDecompressionPriority(m_ThreadPriority);
#endif
			m_UnityWebStream->Retain();
			m_DidParseUnityWebStream = true;
		}
		// Is not a Unity Web file
		else if (result == 2)
		{
			m_DidParseUnityWebStream = true;
		}
	}

	// Feed it the data that has arrived
#if !UNITY_FLASH
	if (m_UnityWebStream)
	{
		WWW::AutoLock lock(*this);

		// m_StreamingPosition tracks how much has already been fed, so only
		// the newly arrived tail is handed over.
		m_UnityWebStream->FeedDownloadData(GetPartialData() + m_StreamingPosition, GetPartialSize() - m_StreamingPosition, isDone);
		m_StreamingPosition = GetPartialSize();
	}
#else
	if(m_UnityWebStream && isDone){
		//RH : TODO : Flash gets the whole stream in one go for now, but we can still chunk this if needs be (if we decide to use swf compression instead).
		m_UnityWebStream->FeedDownloadData(GetData(), GetSize(), isDone);
	}
#endif
}
+
// Total file size announced by the web stream header, or 0 when unknown
// (no stream yet, or header not parsed).
UInt32 WWW::GetEstimatedDownloadSize() const
{
	return m_UnityWebStream && m_UnityWebStream->DidParseHeader() ?
		m_UnityWebStream->GetHeader().completeFileSize: 0u;
}

// Manual reference counting; the object deletes itself when the last
// reference is released (see the cleanup-procedure comment above).
void WWW::Retain()
{
	m_RefCount.Retain();
}

void WWW::Release()
{
	if ( m_RefCount.Release() ) {
		delete this;
	}
}
+
+#if WWW_USE_BROWSER
+#include "PlatformDependent/CommonWebPlugin/Download.h"
+Download* StartDownloadBinding (const char* url, void* userData, DownloadProgressFunction* callback, const void* headersData, size_t headersLength, const void* postData, size_t postLength );
+
+// Requires that m_Download exists
+int WWWBrowser::GetTotalBytesUntilLoadable() const
+{
+ Assert(m_Download);
+
+ if (GetUnityWebStream())
+ {
+ int res = GetUnityWebStream()->GetTotalBytesUntilLoadable();
+ if (res != -1)
+ return res;
+ }
+
+ return m_Download->GetTotalBytes();
+}
+
+// When all data was in browser's cache, the download might already be finished
+// (happens on IE6/IE7 at least). In this case our progress callback will not be
+// ever called. So check download here and call progress callback manually.
+// Case 15160, 15074.
+
+void WWWBrowser::ForceProgressDownload ()
+{
+ if( m_Download != NULL && (m_Download->GetStatus() == Download::kCompleted || m_Download->GetStatus() == Download::kFailed) )
+ {
+ WWWBrowser::ProgressDownload( m_Download );
+ }
+}
+
+extern int GetPluginVersion();
+
// Browser download progress callback. Runs while data arrives and once more
// on completion/failure; on a terminal status it detaches the Download,
// captures headers/data and marks the Download for destruction.
int WWWBrowser::ProgressDownload(Download* download)
{
	// UserData is cleared when the WWW side detaches; nothing to do then.
	if (download->GetUserData () == NULL)
		return 0;

	WWWBrowser& www = *reinterpret_cast<WWWBrowser*>(download->GetUserData ());

	AssertIf(www.m_LockedPartialData != 0);
	if (www.m_Download == NULL)
		return 0;

	AssertIf(www.m_Download != download);

	// get the status before changing it later!
	int downloadStatus = download->GetStatus();

	// Detect if we are loading a unity web stream and feed it the data for live decompression
	if (downloadStatus == Download::kLoading || downloadStatus == Download::kCompleted)
		www.FeedUnityWebStream(downloadStatus == Download::kCompleted);

	if( downloadStatus == Download::kLoading )
		www.m_Eta = CalculateEta(download->GetDownloadedBytes(), www.GetTotalBytesUntilLoadable(), www.m_StartDownloadTime);

	// Still in progress (or not started): nothing more to do this tick.
	if( downloadStatus <= Download::kLoading )
		return 0;

	// Mark for destruction!
	www.m_Download->SetStatus(Download::kDestroyAfterCallback);

	// Response headers are only available from plugin version 5 onwards.
	if (GetPluginVersion() >= 5)
	{
		if(const char* headers = download->GetResponseHeaders())
		{
			www.m_ResponseHeaders.assign(headers);
			www.SetErrorFromResponseHeaders ();
		}
		else
		{
			www.m_ResponseHeaders.clear();
		}
	}

	// Error while downloading
	if( downloadStatus != Download::kCompleted )
	{
		www.m_Error = "Failed downloading " + www.m_Url;
		download->SetUserData (NULL);
		www.m_Download = NULL;
		return 0;
	}

	#if SUPPORT_REPRODUCE_LOG
	CompleteWWWReproduce(&www, www.GetUrl(), download->GetDownloadedData(), download->GetDownloadedBytes());
	#endif

	if (www.GetUnityWebStream() == NULL)
	{
		// Move over the data. We can't just reference the data from Download, because it is allocated
		// from a different DLL (on Windows each DLL has separate heaps).
		std::size_t dsize = download->GetDownloadedBytes();
		const UInt8* dfirst = download->GetDownloadedData();

		www.m_Buffer.resize(dsize);
		std::copy(dfirst, dfirst + dsize, www.m_Buffer.begin());
	}
	// Don't copy data for unitywebstream
	else
	{
		www.m_Buffer.clear();
	}
	www.m_Eta = 0.0F;
	// Detach: from here on IsDownloadingDone() reports true.
	download->SetUserData (NULL);
	www.m_Download = NULL;

	return 0;
}
+
// Browser-backed WWW request. Validates the post arguments, flattens the
// header map into a single string and copies the post body. On validation
// failure m_Error is set and the request never starts.
// Convention: postDataLength < 0 means GET, >= 0 means POST.
WWWBrowser::WWWBrowser (const char* postDataPtr, int postDataLength, const WWWHeaders& i_Headers, bool cached, int cacheVersion , UInt32 crc)
	: WWW(cached, cacheVersion, crc)

	, m_Download(NULL)

	, m_Buffer() // Empty
{
	m_Eta = std::numeric_limits<double>::infinity();
	m_StartDownloadTime = GetTimeSinceStartup();
	m_LockedPartialData = 0;

	WWWHeaders headers = i_Headers; // Copy headers as we want to modify them before generating data

	// Argument validation; each failure stores a canonical error string.
	if( postDataPtr == NULL && postDataLength > 0 ) {
		m_Error = kWWWErrNULLPostDataWithPositiveLength;
		return;
	}
	if( postDataLength < 0 && !headers.empty() ) {
		m_Error = kWWWErrCustomHeadersWithGET;
		return;
	}
	if( postDataLength == 0 && postDataPtr != NULL) {
		m_Error = kWWWErrZeroPostData;
		return;
	}

	// Pepper adds it's own Content-Length header on post requests. Adding one here will cause an error.
#if !UNITY_PEPPER
	if( postDataLength > -1 && postDataPtr != NULL ) {
		// Override the Content-Length header when we have post data
		headers["Content-Length"] = IntToString( postDataLength );
	}
	else
#endif
		headers.erase("Content-Length");

	// Put all headers into single headers string
	m_HeadersString.clear();
	for( WWWHeaders::iterator i = headers.begin(); i != headers.end(); ++i )
		m_HeadersString += i->first + ": " + i->second +
#if !UNITY_PEPPER
		"\r\n"
#else
		// NaCl uses CORS validation to decide if it allows cross domain requests.
		// Starting with Chrome 17, it will no longer allow any Unity requests with custom headers.
		// Google explained that the validation would fail because we use \r which is an illegal
		// character in the http headers string. We should probably also change it for non NaCl cases,
		// but I don't want to risk anything at this rc stage.
		"\n"
#endif
		;
	// NOTE(review): the post body is only kept when the headers string is
	// non-empty. Outside Pepper a Content-Length header is always added for
	// posts above, so the condition holds; on Pepper a POST with no custom
	// headers would drop its body here — verify intended.
	if( postDataLength >= 0 && postDataPtr != NULL && !m_HeadersString.empty() )
	{
		m_PostLength = postDataLength;

		// Store a terminating byte after last byte in buffer just in case somebody thinks it's a string
		m_PostData = new char[ m_PostLength + 1 ];
		m_PostData[m_PostLength] = 0;
		if( postDataLength > 0 )
			memcpy( m_PostData, postDataPtr, postDataLength );
	}
	else
	{
		m_PostLength = -1;
		m_PostData = NULL;
	}

	m_Cached = cached;
	m_CacheVersion = cacheVersion;
}
+
// Blocks until the decompression thread (if any) has finished. The browser
// owns the actual download, so only decompression can be waited on here.
void WWWBrowser::BlockUntilDone ()
{
	if (GetUnityWebStream())
		GetUnityWebStream()->WaitForThreadDecompression();
}

// Detaches from a still-running Download (the browser destroys it after the
// next callback) and frees the copied post body.
WWWBrowser::~WWWBrowser ()
{
	#if UNITY_LINUX
	#warning FIXME LINUX
//	if (m_Download != NULL) {
//		delete m_Download;
//	}
	#else
	if (m_Download != NULL)
	{
		m_Download->SetUserData (NULL);
		m_Download->SetStatus(Download::kCancelAndDestroyAfterCallback);
		m_Download = NULL;
	}
	#endif

	delete[] m_PostData;
}

// Records the cancellation error and detaches from the Download, which is
// told to cancel itself and self-destruct after its next callback.
void WWWBrowser::Cancel ()
{
	m_Error = kWWWErrCancelled;
	if (m_Download != NULL)
	{
		m_Download->SetUserData (NULL);
		m_Download->SetStatus(Download::kCancelAndDestroyAfterCallback);
		m_Download = NULL;
	}
}
+
// Download progress in [0,1]; reports 1.0 once the Download is detached
// (finished, failed or cancelled).
float WWWBrowser::GetProgress() const
{
	if (m_Download != NULL)
	{
		return GetDownloadProgress(m_Download->GetDownloadedBytes(), GetTotalBytesUntilLoadable());
	}
	else
		return 1.0F;
}

float WWWBrowser::GetUploadProgress() const
{
	// @TODO: Implement this properly - currently we return 0.5 until download progress is > 0
	if ( m_Download == NULL)
		return 1.0F;
	// Once any response bytes arrived the upload phase must be over.
	return ( m_Download->GetDownloadedBytes() )?1.0F:0.5F;
}
+
// Returns the error string, or NULL when no error occurred. m_Error doubles
// as the persistent storage GetCachedWWWError composes stream errors into.
const char* WWWBrowser::GetError()
{
	return GetCachedWWWError(*this, m_Error);
}

void WWWBrowser::SetError (const std::string& error)
{
	m_Error = error;
}
+
// Pointer to the bytes downloaded so far. Must be called between
// LockPartialData()/UnlockPartialData(). Prefers the local copy in m_Buffer
// (filled on completion); while loading it points into the browser's buffer.
const UInt8* WWWBrowser::GetPartialData() const
{
	AssertIf (m_LockedPartialData == 0);

	if (!m_Buffer.empty())
		return m_Buffer.data();
	else if (m_Download && (m_Download->GetStatus() == Download::kLoading || m_Download->GetStatus() == Download::kCompleted))
	{
		return m_Download->GetDownloadedData ();
	}
	else
		return NULL;
}

// Byte count matching GetPartialData(); same locking requirement.
size_t WWWBrowser::GetPartialSize() const
{
	AssertIf (m_LockedPartialData == 0);

	if (m_Buffer.empty() && m_Download != NULL)
		return m_Download->GetDownloadedBytes ();

	return m_Buffer.size();
}
+
// "Lock" for partial data access — currently only a counter asserted by the
// accessors above, not a real mutex.
void WWWBrowser::LockPartialData()
{
//	ErrorString("Someone needs to think about and implement proper locking for this");
	m_LockedPartialData++;
}

void WWWBrowser::UnlockPartialData()
{
	AssertIf(m_LockedPartialData == 0);
	m_LockedPartialData--;
//	ErrorString("Someone needs to think about and implement proper locking for this");
}

// Estimated seconds remaining; kept up to date by ProgressDownload.
double WWWBrowser::GetETA() const
{
	return m_Eta;
}
+
// Final downloaded bytes (the copy made on completion; empty for web-stream
// payloads and while still downloading).
const UInt8* WWWBrowser::GetData()
{
	return m_Buffer.data();
}

size_t WWWBrowser::GetSize()
{
	return m_Buffer.size();
}

// The Download pointer is nulled exactly when the transfer ends (success,
// failure or cancel), so its absence means "downloading finished".
bool WWWBrowser::IsDownloadingDone() const
{
	return m_Download == NULL;
}
+
+bool WWWBrowser::HasDownloadedOrMayBlock ()
+{
+ if (GetError () != NULL)
+ {
+ ErrorString(Format("You are trying to load data from a www stream which had the following error when downloading.\n%s", GetError()));
+ return false;
+ }
+
+ if (IsDone())
+ return true;
+ else
+ {
+ ErrorString("You are trying to load data from a www stream which has not completed the download yet.\nYou need to yield the download or wait until isDone returns true.");
+ return false;
+ }
+}
+
// The URL passed to CreateBrowser (not any reproduce-log remapping).
const char* WWWBrowser::GetUrl() const
{
	return m_Url.c_str();
}
+
// Factory: builds a WWWBrowser and starts the browser-side download.
// Returns the object even when constructor validation failed (caller reads
// GetError()); returns NULL only when the browser refuses the download.
WWWBrowser* WWWBrowser::CreateBrowser (const char* url, const char* postDataPtr, int postDataLength, const WWWHeaders& headers, bool cached, int cacheVersion, UInt32 crc )
{
	WWWBrowser* www = new WWWBrowser (postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
	if (www->GetError() != NULL)
		return www;

	const char* actualUrl = url;
	int actualPostLength = www->m_PostLength;

	// Reproduction logging may redirect the request to a recorded URL.
	#if SUPPORT_REPRODUCE_LOG
	string remappedurl;
	CreateWWWReproduce(www, url, remappedurl, actualPostLength);
	actualUrl = remappedurl.c_str();
	#endif

	Download* download = StartDownloadBinding(actualUrl, www, WWWBrowser::ProgressDownload, www->m_HeadersString.c_str(), www->m_HeadersString.size(), www->m_PostData, actualPostLength );
	if (download)
	{
		www->m_Download = download;
		www->m_Url = url;

		#if SUPPORT_REPRODUCE_LOG
		// Cant show the WWW as completed right away. Need to wait until next frame.
		if (RunningReproduction())
		{
			return www;
		}
		#endif

		// When all data was in browser's cache, the download might already be finished
		// (happens on IE6/IE7 at least). In this case our progress callback will not be
		// ever called. So check download here and call progress callback manually.
		// Case 15160, 15074.
		www->ForceProgressDownload();

		return www;
	}
	else
	{
		printf_console("Failed browser download\n");
		delete www;
		return NULL;
	}
}
+
+#endif // WWW_USE_BROWSER
+
+// This function creates a WWW backend without cross domain checking; it's good to have it separated from WWW::Create, because this way it's simpler to debug
+// crossdomain checking routines, because they don't have to recursively call WWW::Create when download starts.
+static WWW* CreatePlatformWWWBackend (const char* url, const char* postDataPtr, int postDataLength, const WWW::WWWHeaders& headers, bool cached, int cacheVersion, UInt32 crc );
+
// Public entry point for creating a download. Optionally wraps the request
// in cross-domain (crossdomain.xml) checking before handing off to the
// platform backend.
WWW* WWW::Create (const char* url, const char* postDataPtr, int postDataLength, const WWWHeaders& headers, bool crossDomainChecked, bool cached, int cacheVersion, UInt32 crc )
{
#if ENABLE_WEBPLAYER_SECURITY

// Reproduction runs bypass security checks to stay deterministic.
#if SUPPORT_REPRODUCE_LOG
	if (RunningReproduction()) crossDomainChecked = false;
#endif

// In the editor, only web-player build targets enforce the check.
#if UNITY_EDITOR
	if (GetBuildTargetGroup( GetEditorUserBuildSettings().GetActiveBuildTarget ()) != kPlatformWebPlayer)
		crossDomainChecked = false;
#endif

	if (crossDomainChecked)
	{
		return new WWWCrossDomainChecked(url, postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
	}

#endif // ENABLE_WEBPLAYER_SECURITY
	return CreatePlatformWWWBackend(url, postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
}
+
// Compile-time dispatch to the platform's WWW implementation (see includes
// at the top of the file). Exactly one branch is active per build.
static WWW* CreatePlatformWWWBackend (const char* url, const char* postDataPtr, int postDataLength, const WWW::WWWHeaders& headers, bool cached, int cacheVersion, UInt32 crc )
{
	#if WWW_USE_BROWSER
	return WWWBrowser::CreateBrowser(url, postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
	#elif UNITY_IPHONE
	return new iPhoneWWW(url, postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
	#elif UNITY_ANDROID
	return new AndroidWWW(url, postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
	#elif UNITY_FLASH
	return new WWWFlash(url, postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
	#elif UNITY_XENON
	return new XenonWWW(url, postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
	#elif UNITY_PS3
	return new PS3WWW(url, postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
	#elif UNITY_WINRT
	return new WWWMetro(url, postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
	#elif WWW_USE_CURL
	return new WWWCurl(url, postDataPtr, postDataLength, headers, cached, cacheVersion, crc);
	#else
	#error Unknown WWW backend
	#endif
}
+
+
+#if WWW_USE_CURL
+
// Forward read callbacks from CURL to AppendBytes:
// Runs on the download thread. Returning a value different from
// size*elements (here (size_t)-1) makes curl abort the transfer.
size_t WWWCurl::WriteCallback(void * data, size_t size, size_t elements, WWWCurl * myData)
{
	if (myData->abortDownload)
		return -1;

	size_t res = myData->AppendBytes(data,size*elements);

	// Hand freshly arrived bytes to the web-stream decompressor right away.
	myData->FeedUnityWebStream(false);

	return res;
}
+
+//If the server reports a Content-Length header, allocate the whole block of memory in advance.
+size_t WWWCurl::HeaderCallback(void * data, size_t size, size_t elements, WWWCurl * myData)
+{
+ if (myData->abortDownload)
+ return -1;
+
+ size_t len = size*elements;
+
+ char *str = (char*)alloca(len+1);
+
+ memcpy(str, data, size*elements);
+ str[size*elements] = '\0';
+
+ size_t totalSize = 0;
+ if(sscanf(str,"Content-Length:%d",&totalSize))
+ {
+ if(totalSize > myData->alloc_size) {
+ Mutex::AutoLock lock(myData->mutex);
+ myData->totalSize = totalSize;
+ myData->alloc_size = totalSize;
+ myData->data = (UInt8*)realloc((void*)myData->data, totalSize);
+ }
+ }
+ // Set response headers
+ {
+ Mutex::AutoLock lock(myData->mutex);
+ myData->m_ResponseHeaders.append(str);
+ myData->m_ResponseHeaders.append("\r\n");
+ }
+ myData->SetErrorFromResponseHeaders ();
+ return size*elements;
+}
+
// Forward read callbacks from CURL to PostBytes:
// Supplies the next chunk of the POST body; returning 0 signals end of data.
size_t WWWCurl::ReadCallback(void * data, size_t size, size_t elements, WWWCurl * myData)
{
	if (myData->abortDownload)
		return -1;

	size_t res = myData->PostBytes(data,size*elements);
	return res;
}
+
+int WWWCurl::ProgressCallback (WWWCurl *myData,
+ double dltotal,
+ double dlnow,
+ double ultotal,
+ double ulnow)
+{
+ if (myData->abortDownload)
+ return -1;
+
+ if( dltotal > 0 ) {
+ myData->totalSize = dltotal;
+ myData->progress = dlnow / dltotal;
+ myData->eta = CalculateEta (RoundfToInt(dlnow), RoundfToInt(dltotal), myData->startTime);
+ myData->uploadProgress = 1.0;
+ }
+ else if( ultotal > 0 ) {
+ myData->uploadProgress = ulnow / ultotal;
+ }
+ return 0;
+}
+
+UInt32 WWWCurl::GetEstimatedDownloadSize() const
+{
+ if (totalSize > 0)
+ return totalSize;
+ else
+ return WWW::GetEstimatedDownloadSize();
+}
+
+curl_slist* WWWCurl::GetHeaderSList () {
+ // Remove any previously generated headers
+ if(curlHeaders != NULL)
+ curl_slist_free_all(curlHeaders);
+
+ // Now begin with an empty slist
+ curlHeaders=NULL;
+
+ for(WWWHeaders::iterator i = requestHeaders.begin(); i != requestHeaders.end(); i++)
+ curlHeaders = curl_slist_append(curlHeaders, (i->first + ": " + i->second).c_str() );
+
+ return curlHeaders;
+}
+
// Performs the whole transfer synchronously on the download thread: sets up
// a curl easy handle (callbacks, POST body, headers, proxy), runs it, and
// finalizes the progress fields. Returns the CURLcode from curl_easy_perform
// (or 1 when the handle could not be created).
CURLcode WWWCurl::GetURL( const char* url) {
	CURL* curl = curl_easy_init();
	CURLcode res = 1;
	if(curl) {

		curl_easy_setopt(curl, CURLOPT_URL, url);
		curl_easy_setopt(curl, CURLOPT_WRITEDATA, this);
		curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
		curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, 0); // We don't install CA certs with unity, so disable SSL cert validation
		curl_easy_setopt(curl, CURLOPT_NOSIGNAL, 1); // Needs to be set when running cur in multible threads

		// @TODO: Enable cookies using something like this:
		// (or preferably something that does not require storing files on disk. expose cookies through Application.cookies[xx] or simmilar)
		// curl_easy_setopt(curl, CURLOPT_COOKIEFILE, cookie_file);
		// curl_easy_setopt(curl, CURLOPT_COOKIEJAR, cookie_file);

		// If we have post data, the request should be a POST request
		if(postData != NULL && postLength > -1) {
			postPosition=0;
			curl_easy_setopt(curl, CURLOPT_POST, 1L);
			curl_easy_setopt(curl, CURLOPT_POSTFIELDSIZE, postLength);
			curl_easy_setopt(curl, CURLOPT_READDATA, this);
			curl_easy_setopt(curl, CURLOPT_READFUNCTION, ReadCallback);
			requestHeaders["Content-Length"]=Format("%d",postLength);
		}

		// Attach custom request headers (built from requestHeaders).
		curl_slist* headers = GetHeaderSList ();
		if(headers != NULL)
			curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);

		// Progress reporting must be enabled explicitly.
		curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0);
		curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this);
		curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, ProgressCallback);
//		curl_easy_setopt(curl, CURLOPT_BUFFERSIZE, 16000);

		curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
		curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, errorBuffer);
		curl_easy_setopt(curl, CURLOPT_USERAGENT, "UnityPlayer/" UNITY_VERSION " (http://unity3d.com)");
//		curl_easy_setopt(curl, CURLOPT_TIMEOUT, 5);

		curl_easy_setopt(curl, CURLOPT_HEADERDATA, this);
		curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, HeaderCallback);

		SetupCurlProxyServerBasedOnEnvironmentVariable(curl, url);

		// curl_easy_setopt(curl, CURLOPT_HEADER, 1L); // TODO

		res = curl_easy_perform(curl);

		curl_easy_cleanup(curl);
	}

	// Whatever the outcome, the transfer is over now.
	progress = 1.0F;
	uploadProgress = 1.0F;
	eta = 0.0F;

	return res;
}
+
// Forwards the priority to the base class (which updates the decompression
// thread) and applies it to the curl download thread as well.
void WWWCurl::SetThreadPriority( ThreadPriority priority )
{
	WWW::SetThreadPriority(priority);
	thread.SetPriority(priority);
}
+
+size_t WWWCurl::AppendBytes(void * moreData, size_t bytes)
+{
+ if(size+bytes > alloc_size)
+ {
+ Mutex::AutoLock lock(mutex);
+ const UInt32 estimatedDownloadSize = GetEstimatedDownloadSize();
+ if (alloc_size+bytes <= estimatedDownloadSize)
+ alloc_size = estimatedDownloadSize;
+ else
+ alloc_size = (alloc_size * 1.5) + bytes;
+ data = (UInt8*)realloc((void*)data, alloc_size);
+ if (!data)
+ {
+ ErrorString("WWW: out of memory");
+ return 0; // this will stop the transfer - not all data is loaded
+ }
+ }
+ if (!moreData){
+ return 0;
+ }
+ memcpy(data+size,moreData,bytes);
+ size += bytes;
+ return bytes;
+}
+
// Thread-safe copy of the headers accumulated by HeaderCallback.
std::string WWWCurl::GetResponseHeaders()
{
	Mutex::AutoLock lock(mutex);
	return m_ResponseHeaders;
}

// Copies up to 'bytes' bytes of the remaining POST body into 'moreData'.
// Returns 0 at end of body (curl's end-of-data signal).
// NOTE(review): 'bytes' is size_t while postLength/postPosition appear to be
// signed — the arithmetic relies on postPosition never exceeding postLength;
// verify member types in WWW.h.
size_t WWWCurl::PostBytes(void * moreData, size_t bytes) {
	if(bytes > postLength-postPosition)
		bytes = postLength-postPosition;

	if(bytes <= 0) return 0; // end of file

	memcpy(moreData,postData+postPosition,bytes);
	postPosition += bytes;
	return bytes;
}
+
// A C wrapper for the pthread entry point
// Runs the whole curl transfer, then flushes the final chunk into the
// web-stream decompressor unless the download was aborted or errored.
void* WWWCurl::WWW_ThreadEntryPoint(void* data)
{
	WWWCurl& www = *(WWWCurl*)data;

	www.result = www.GetURL(www.url);

	if (!www.abortDownload && www.GetError() == NULL)
	{
		www.FeedUnityWebStream(true);
	}

	return data;
}
// Constructor delegates all work (member init, validation, thread start)
// to DoInit.
WWWCurl::WWWCurl( const char* in_url, const char * in_postData, int in_postLength, const WWWHeaders& in_headers, bool cached, int cacheVersion, UInt32 crc )
	: WWW(cached, cacheVersion, crc)
{
	DoInit(in_url, in_postData, in_postLength, in_headers);
}

// Spawns the download thread running WWW_ThreadEntryPoint on this object.
void WWWCurl::StartThread()
{
	thread.Run(&WWW_ThreadEntryPoint, (void*) this);
}
+
// Blocks the caller until both the download thread and (if present) the
// web-stream decompression thread have finished.
void WWWCurl::BlockUntilDone()
{
	thread.WaitForExit();

	if (GetUnityWebStream())
		GetUnityWebStream()->WaitForThreadDecompression();
}

// The download is done exactly when the worker thread has exited.
bool WWWCurl::IsDownloadingDone() const
{
	return !thread.IsRunning();
}
+
// Full payload accessor: blocks until the transfer finished, then returns
// the receive buffer (may be NULL when nothing was downloaded).
const UInt8* WWWCurl::GetData()
{
	BlockUntilDone();
	return data;
}

// Non-blocking view of the bytes received so far; callers are expected to
// hold the partial-data lock (below) while reading.
const UInt8* WWWCurl::GetPartialData() const
{
	return data;
}

// Partial-data lock maps directly onto the object's mutex, which the curl
// callbacks also take when growing the buffer.
void WWWCurl::LockPartialData()
{
	mutex.Lock();
}

void WWWCurl::UnlockPartialData()
{
	mutex.Unlock();
}
+
+const char* WWWCurl::GetError()
+{
+ if (strlen (errorBuffer))
+ return errorBuffer;
+
+ std::string fullError;
+ const char* err = GetCachedWWWError(*this, fullError);
+ SetError(fullError); // Nothing will be set if empty
+ return err;
+}
+
+void WWWCurl::SetError (const std::string& error)
+{
+ strncpy (errorBuffer, error.c_str(), CURL_ERROR_SIZE);
+}
+
+
// Final payload size: blocks until the transfer finished.
size_t WWWCurl::GetSize()
{
	BlockUntilDone();
	return size;
}

// Bytes received so far (non-blocking); pair with Lock/UnlockPartialData.
size_t WWWCurl::GetPartialSize() const
{
	return size;
}


// The URL as passed to the constructor (heap copy owned by this object).
const char* WWWCurl::GetUrl() const
{
	return url;
}
+
// Download progress in [0,1], maintained by ProgressCallback and forced to
// 1.0 when the transfer ends (GetURL) or validation fails (DoInit).
float WWWCurl::GetProgress() const
{
	return progress;
}

// Upload progress in [0,1]; jumps to 1.0 once any response data arrives.
float WWWCurl::GetUploadProgress() const
{
	return uploadProgress;
}

// Estimated seconds remaining; 0 once the request is done.
double WWWCurl::GetETA() const
{
	if(IsDone())
		return 0.0;
	else
		return eta;
}
+
+
+bool WWWCurl::HasDownloadedOrMayBlock ()
+{
+ if (GetError () != NULL)
+ {
+ ErrorString(Format("You are trying to load data from a www stream which had the following error when downloading.\n%s", GetError()));
+ return false;
+ }
+
+ if (IsDone())
+ return true;
+ else
+ {
+ // File based curl's may block
+ if( BeginsWithCaseInsensitive(url, "file://") )
+ return true;
+
+ ErrorString("You are trying to load data from a www stream which has not completed the download yet.\nYou need to yield the download or wait until isDone returns true.");
+ return false;
+ }
+}
+
+
+void WWWCurl::DoInit( const char* in_url, const char * in_postData, int in_postLength, const WWWHeaders& in_headers )
+{
+ alloc_size=0;
+ size=0;
+ postData=NULL;
+ postLength=-1;
+ postPosition=0;
+ data=NULL;
+ errorBuffer=(char*)calloc(CURL_ERROR_SIZE,sizeof(char));
+ AssertIf(!errorBuffer);
+ errorBuffer[0] = 0;
+ abortDownload = false;
+ progress = 0;
+ totalSize = 0;
+ uploadProgress = 0;
+ startTime = GetTimeSinceStartup();
+ curlHeaders=NULL;
+ eta = std::numeric_limits<double>::infinity();
+
+ url = (char*)malloc(strlen(in_url)+1);
+ AssertIf(!url);
+ strcpy(url, in_url);
+
+ if( in_postData == NULL && in_postLength > 0 ) {
+ result=-1;
+ if (errorBuffer) free (errorBuffer);
+ errorBuffer=(char*)malloc(strlen(kWWWErrNULLPostDataWithPositiveLength)+1);
+ strcpy(errorBuffer, kWWWErrNULLPostDataWithPositiveLength);
+ progress = 1.0F;
+ return;
+ }
+ if( in_postLength == 0 && in_postData != NULL) {
+ result=-1;
+ if (errorBuffer) free (errorBuffer);
+ errorBuffer=(char*)malloc(strlen(kWWWErrZeroPostData)+1);
+ if(errorBuffer)
+ strcpy(errorBuffer, kWWWErrZeroPostData);
+ progress = 1.0F;
+ return;
+ }
+
+
+ requestHeaders = in_headers;
+
+ if(in_postData != NULL && in_postLength > -1) {
+ postLength=in_postLength;
+ postData = new char[in_postLength];
+ memcpy((void *)postData, (const void*)in_postData, in_postLength);
+ }
+
+ result = 0;
+
+ StartThread();
+}
+
// Stops the download thread (cooperatively, via abortDownload checked in
// every curl callback) before releasing all owned memory.
// Allocation pairing: data/errorBuffer/url are malloc'd (free), postData is
// new[]'d (delete[]), curlHeaders is a curl list (curl_slist_free_all).
WWWCurl::~WWWCurl()
{
	abortDownload = true;
	thread.WaitForExit();

	// Abort decompressionthreads that the curl thread might have spawned
	if (GetUnityWebStream())
		GetUnityWebStream()->AbortThreadDecompression ();

	free(data);
	free(errorBuffer);
	if(postData != NULL) delete[](postData);
	if(curlHeaders != NULL)
		curl_slist_free_all(curlHeaders);

	free(url);
}
+
+void WWWCurl::Cancel ()
+{
+ strcpy(errorBuffer, kWWWErrCancelled);
+ if (thread.IsRunning ())
+ {
+ abortDownload = true;
+ }
+}
+
+
+#endif // WWW_USE_CURL
+
+WWWDelayCall::WWWDelayCall(WWW* www, DelayedCall* func, Object* o, void* userData, CleanupUserData* cleanup){
+ m_wait_for=www;
+ m_wait_for->Retain();
+ m_func=func;
+ m_o=o;
+ m_userData=userData;
+ m_cleanup=cleanup;
+}
+
+WWWDelayCall::~WWWDelayCall() {
+ m_wait_for->Release();
+ if (m_cleanup != NULL) {
+ m_cleanup(m_userData);
+ }
+}
+
+void WWWDelayCall::Cleanup(void* userData) {
+ AssertIf(userData == NULL);
+ WWWDelayCall* delayCall = (WWWDelayCall*) userData;
+ delete delayCall;
+}
+
+void WWWDelayCall::Callback(Object* o, void* userData) {
+ WWWDelayCall* delayCall = (WWWDelayCall*) userData;
+ AssertIf(delayCall->m_o != o);
+
+ if(delayCall->m_wait_for == NULL || delayCall->m_wait_for->IsDone()) {
+ delayCall->m_func(o, delayCall->m_userData);
+ GetDelayedCallManager ().CancelCallDelayed(o,WWWDelayCall::Callback, WWWDelayCall::MatchForCancel, userData);
+ }
+}
+
+bool WWWDelayCall::MatchForCancel(void* callBackUserData, void* cancelUserData) {
+ return callBackUserData == cancelUserData;
+}
+
+void WWW::CallWhenDone(DelayedCall* func, Object* o, void* userData, CleanupUserData* cleanup) {
+ WWWDelayCall* delayCall = new WWWDelayCall(this, func, o, userData, cleanup);
+ CallDelayed(WWWDelayCall::Callback, o, 0.0F, (void*)delayCall, -1.0F, WWWDelayCall::Cleanup, DelayedCallManager::kRunDynamicFrameRate | DelayedCallManager::kWaitForNextFrame);
+}
+
+#if ENABLE_WEBPLAYER_SECURITY
+
+#include "Runtime/Utilities/ArrayUtility.h"
+
+void ProcessCrossDomainRequestsFromNonMainThread()
+{
+ MonoMethod* monoMethod = mono_unity_find_method("CrossDomainPolicyParser.dll","UnityEngine","UnityCrossDomainHelper","ProcessRequestsFromOtherThreads");
+ ScriptingMethodPtr method = GetScriptingManager().GetScriptingMethodRegistry().GetMethod(monoMethod);
+ if (!method)
+ {
+ Assert("Unable to find ProcessRequestsFromOtherThreads");
+ return;
+ }
+
+ ScriptingInvocation(method).Invoke();
+}
+
+
// Stub WWW backend that performs only the crossdomain policy check.
// WWWCrossDomainChecked points its m_WWW at an instance of this class until
// the policy is resolved, so all trivial forwarders have a safe target: the
// data accessors return empty results and the lock functions do nothing.
class WWWCrossDomainCheckedImpl : public WWW
{
private:
	std::string m_URL;                     // URL whose policy is being checked
	const char* m_Error;                   // NULL while no error has occurred
	ScriptingMethodPtr m_ScriptingMethod;  // managed UnityCrossDomainHelper.GetSecurityPolicy

	// Serves only as a cache, nothing more
	mutable SecurityPolicy m_CachedSecPolicy;

public:
	WWWCrossDomainCheckedImpl (const char* url, bool cached, int cacheVersion, UInt32 crc);
	~WWWCrossDomainCheckedImpl () {}

	virtual SecurityPolicy GetSecurityPolicy() const;
	// Data accessors are stubs: this "download" produces no payload.
	virtual const UInt8* GetData() { return 0; }
	virtual const UInt8* GetPartialData() const { return 0; }
	virtual size_t GetSize() { return 0u; }
	virtual size_t GetPartialSize() const { return 0u; }
	virtual UnityWebStream* GetUnityWebStream () const { return 0; }

	virtual double GetETA() const { return std::numeric_limits<double>::infinity(); }

	// No data to protect, so locking is a no-op.
	virtual void LockPartialData() {}
	virtual void UnlockPartialData() {}

	// Returns true when the download is complete or failed.
	virtual void Cancel() { m_ScriptingMethod = 0; }
	virtual bool IsDownloadingDone() const;
	virtual float GetProgress() const { return 0.0f; }
	virtual float GetUploadProgress() const { return 0.0f; }
	virtual const char* GetError() { return m_Error; }
	virtual const char* GetUrl() const { return m_URL.c_str(); }
	virtual bool HasDownloadedOrMayBlock () { return true; }
	// NOTE(review): busy-wait; IsDownloadingDone() drives the check forward.
	virtual void BlockUntilDone () { while ( !IsDownloadingDone() ) { /*Wait*/ } }

	virtual WWWType GetType () const { return kWWWTypeCrossDomainChecked; }

	bool HasNoErrors() const { return m_Error == 0; }

	const std::string& GetUrlString() const { return m_URL; }
	void SetErrorStringPtr (const char* error) { m_Error = error; }
};
+
+
+inline static ScriptingMethodPtr GetCrossDomainPolicyParserScriptingMethod()
+{
+ MonoMethod* monoMethod = mono_unity_find_method("CrossDomainPolicyParser.dll","UnityEngine","UnityCrossDomainHelper","GetSecurityPolicy");
+ return GetScriptingManager().GetScriptingMethodRegistry().GetMethod(monoMethod);
+}
+
+WWWCrossDomainCheckedImpl::WWWCrossDomainCheckedImpl (const char* url, bool cached, int cacheVersion, UInt32 crc)
+ : WWW(cached, cacheVersion, crc)
+ , m_URL(url)
+ , m_Error(0) // Null error string
+ , m_ScriptingMethod(GetCrossDomainPolicyParserScriptingMethod())
+ , m_CachedSecPolicy(kSecurityPolicyDontKnowYet)
+{
+ if (!m_ScriptingMethod)
+ {
+ m_CachedSecPolicy = kSecurityPolicyDenyAccess;
+ AssertString("Unable to find GetSecurityPolicy");
+ }
+}
+
+WWW::SecurityPolicy WWWCrossDomainCheckedImpl::GetSecurityPolicy() const
+{
+ if( kSecurityPolicyDontKnowYet != m_CachedSecPolicy )
+ return m_CachedSecPolicy;
+
+ if( !m_ScriptingMethod )
+ return kSecurityPolicyDenyAccess;
+
+ ScriptingInvocation invocation(m_ScriptingMethod);
+ invocation.AddString(m_URL.c_str());
+ MonoObject* result = invocation.Invoke();
+
+ // Cache the security policy so that we don't need to invoke Mono repeatedly once we know the status
+ m_CachedSecPolicy = result ? *reinterpret_cast<SecurityPolicy*>(mono_object_unbox (result)):
+ kSecurityPolicyDenyAccess;
+
+ return m_CachedSecPolicy;
+}
+
+bool WWWCrossDomainCheckedImpl::IsDownloadingDone() const
+{
+ return GetSecurityPolicy() != kSecurityPolicyDontKnowYet;
+}
+
+WWWCrossDomainChecked::WWWCrossDomainChecked (const char* url, const char* postData, int postDataLength, const WWWHeaders& headers, bool cached, int cacheVersion, UInt32 crc)
+ : WWW (cached, cacheVersion, crc),
+ m_PostData(0),
+ m_PostDataLength(postDataLength),
+ m_Headers(headers),
+ m_CrossChecker(new WWWCrossDomainCheckedImpl(url, cached, cacheVersion, crc))
+{
+ if( postDataLength > 0 && postData != 0 )
+ {
+ m_PostDataDataCopy.assign(postData, postData + postDataLength);
+ m_PostData = m_PostDataDataCopy.c_str();
+ }
+
+ // m_WWW is guaranteed to be initialized thoughout the lifetime of this object. At first it points to
+ // m_CrossChecker, which contains the stubs of all the required WWW member functions. This allows the
+ // trivial forwarders to be, indeed, trivial. For example, GetETA() or UnlockPartialData() do not require
+ // any additional checks whether the crossdomain checking has finished, or not.
+ m_WWW = m_CrossChecker;
+ m_WWW->Retain();
+}
+
+WWWCrossDomainChecked::~WWWCrossDomainChecked ()
+{
+ m_WWW->Release(); // It's a no-op for the crossdomain checker,
+ // but will destroy an initialized WWW backend
+ m_CrossChecker->Release();
+}
+
+bool WWWCrossDomainChecked::RequestLooksSafeEnoughToMakeWithoutPolicyAccess() const
+{
+ /// We do this in order to completely prevent downloads that have failed the cross domain check.
+ /// As a result, if you download a png with a .txt extension it will not even download it thus you can't use it as a texture.
+ /// Using ".png" with a failing cross domain check is actually legal.
+ /// As long as you don't use .bytes on it, but instead just use it as a texture. It results in no security risk.
+ ///
+ /// In theory this check could be removed but it might be confusing for admins looking at unity seeing that secret.pdf actually gets downloaded.
+ /// Even though the script code can't actually access the data.
+
+ //if it has post headers, we'll deny.
+ if (m_PostData != 0) return false;
+
+ static const char* allowed_extensions[] = {
+ "png", "jpg", "jpeg", "tga", "tiff", // if it looks like a texture, we'll allow it
+ "wav", "mp3", "ogg", "xm", "mod", "s3m", "it" // if it looks like an audiofile, we'll allow it
+ };
+
+ const char** iter_first = allowed_extensions;
+ const char** iter_last = allowed_extensions + ARRAY_SIZE(allowed_extensions);
+ return std::find( iter_first, iter_last, ToLower(GetPathNameExtension (m_CrossChecker->GetUrlString())) ) != iter_last;
+}
+
+bool WWWCrossDomainChecked::CanCreateDownloader() const
+{
+ // At this point we must have our security policy
+ Assert( m_CrossChecker->IsDownloadingDone() );
+
+ return m_WWW == m_CrossChecker && // Still no WWW backend
+ m_CrossChecker->HasNoErrors(); // There are no crossdomain checker errors
+}
+
+void WWWCrossDomainChecked::StartEmbeddedDownload()
+{
+ if( CanCreateDownloader() )
+ {
+ // Download hasn't begun yet, so we check what kind of security policy
+ // we have, or what type of content we want to download
+ if( GetSecurityPolicy() == kSecurityPolicyAllowAccess || RequestLooksSafeEnoughToMakeWithoutPolicyAccess() )
+ {
+ m_WWW->Release(); // Release previously hold counter on m_CrossChecker
+
+ m_WWW = CreatePlatformWWWBackend(m_CrossChecker->GetUrl(), m_PostData, m_PostDataLength, m_Headers,
+ m_Cached, m_CacheVersion, m_RequestedCRC);
+
+ #if SUPPORT_THREADS
+ m_WWW->SetThreadPriority(GetThreadPriority());
+ #endif
+ }
+ else
+ {
+ m_CrossChecker->SetErrorStringPtr("Rejected because no crossdomain.xml policy file was found");
+ }
+ }
+}
+
+void WWWCrossDomainChecked::BlockedStartEmbeddedDownload()
+{
+ // First we block on cross checker.
+ m_CrossChecker->BlockUntilDone();
+
+ // Now the cross domain checker has finished, we want to block
+ // on the donwloader until it's done.
+ StartEmbeddedDownload();
+}
+
+void WWWCrossDomainChecked::LockPartialData()
+{
+ // The crosschecker might have done its job, so we would like to
+ // start the real download and then lock the downloader's partial data.
+ if( m_CrossChecker->IsDownloadingDone() )
+ {
+ StartEmbeddedDownload();
+ }
+
+ // If embedded download has started, then this locks WWW backend, otherwise this
+ // tries to lock m_CrossChecker, which is no-op.
+ m_WWW->LockPartialData();
+}
+
+void WWWCrossDomainChecked::BlockUntilDone ()
+{
+ BlockedStartEmbeddedDownload();
+ m_WWW->BlockUntilDone();
+}
+
+const UInt8* WWWCrossDomainChecked::GetData()
+{
+ BlockedStartEmbeddedDownload();
+ return m_WWW->GetData();
+}
+std::size_t WWWCrossDomainChecked::GetSize()
+{
+ BlockedStartEmbeddedDownload();
+ return m_WWW->GetSize();
+}
+
+bool WWWCrossDomainChecked::HasDownloadedOrMayBlock ()
+{
+ if (GetError () != NULL)
+ {
+ ErrorString(Format("You are trying to load data from a www stream which had the following error when downloading.\n%s", GetError()));
+ return false;
+ }
+ return m_WWW->HasDownloadedOrMayBlock();
+}
+
// The policy always comes from the crossdomain checker, never from the
// download backend.
WWW::SecurityPolicy WWWCrossDomainChecked::GetSecurityPolicy() const
{
	return m_CrossChecker->GetSecurityPolicy();
}
+
// The URL lives in the checker (the backend may not exist yet).
const char* WWWCrossDomainChecked::GetUrl() const
{
	return m_CrossChecker->GetUrl();
}
+
+// Trivial forwarders to the currently running WWW backend:
+// m_WWW can either be CrossDomainChecker or the backend that is created in CreatePlatformWWWBackend.
// Trivial forwarders to the currently running WWW backend:
// m_WWW can either be CrossDomainChecker or the backend that is created in CreatePlatformWWWBackend.
// Note: despite being const, polling this drives the crossdomain check forward
// and may create the real WWW backend as a side effect (via the AutoLock below).
bool WWWCrossDomainChecked::IsDownloadingDone() const
{
	if( !m_CrossChecker->IsDownloadingDone() )
	{
		// Can't be done, because crossdomain check is not finished
		return false;
	}

	if( CanCreateDownloader() ) // There is still no WWW backend
	{
		// Dummy PartialDataLock/PartialDataUnlock sequence; this may create
		// a WWW backend if content downloading is allowed.
		// (const_cast: the lazily-started download is logically not observable
		// state, so this const method is allowed to trigger it.)
		WWW::AutoLock lock(*const_cast<WWWCrossDomainChecked*>(this));
	}

	return m_WWW->IsDone();
}
+
// Forwards to the active backend; while m_WWW still aliases the checker this
// reports the checker's error (e.g. the crossdomain rejection), or NULL.
const char* WWWCrossDomainChecked::GetError()
{
	return m_WWW->GetError();
}
+
+void WWWCrossDomainChecked::Cancel()
+{
+ return m_WWW->Cancel();
+}
+
+void WWWCrossDomainChecked::SetThreadPriority( ThreadPriority priority )
+{
+ WWW::SetThreadPriority(priority);
+ return m_WWW->SetThreadPriority(priority);
+}
+
// PartialData functions forward to m_CrossChecker when no lock has been
// acquired; the checker's implementations are stubs that do nothing.
// Once the crossdomain check has succeeded and permissions were sufficient to
// start a download, these partial-data calls redirect to the appropriate
// functions of the WWW backend.
+
// Forwards to the active backend; the checker's stub returns 0.
const UInt8* WWWCrossDomainChecked::GetPartialData() const
{
	return m_WWW->GetPartialData();
}
+
// Forwards to the active backend; the checker's stub returns 0.
std::size_t WWWCrossDomainChecked::GetPartialSize() const
{
	return m_WWW->GetPartialSize();
}
+
+void WWWCrossDomainChecked::UnlockPartialData()
+{
+ return m_WWW->UnlockPartialData();
+}
+
// Forwards to the active backend.
std::string WWWCrossDomainChecked::GetResponseHeaders()
{
	return m_WWW->GetResponseHeaders();
}
+
float WWWCrossDomainChecked::GetProgress () const
{
	// Polling IsDone() is what drives the crossdomain check (and the lazy
	// backend creation) forward. Script code usually yields the www or reads
	// isDone, but callers that only read .progress or .audioClip.isReadyToPlay
	// would otherwise stall the check — so poll it here as well.
	IsDone();
	return m_WWW->GetProgress();
}
+
// Forwards to the active backend; the checker's stub reports infinity.
double WWWCrossDomainChecked::GetETA () const
{
	return m_WWW->GetETA();
}
+
// Forwards to the active backend; the checker's stub returns 0.
UnityWebStream* WWWCrossDomainChecked::GetUnityWebStream() const
{
	return m_WWW->GetUnityWebStream();
}
+
// Forwards to the active backend.
bool WWWCrossDomainChecked::IsCached () const
{
	return m_WWW->IsCached();
}
+
+#endif // ENABLE_WEBPLAYER_SECURITY
+
// Decodes %XX escape sequences in a URL string (e.g. "%20" -> ' ').
// Characters other than '%' are copied through unchanged. A '%' that is not
// followed by at least two more characters is dropped (matching the previous
// behavior); non-hex digits after '%' decode to whatever strtol parses.
std::string DecodeEscapedURL(const std::string& url)
{
	std::string decoded;
	decoded.reserve(url.length());

	for (std::string::size_type i = 0; i < url.length(); i++)
	{
		if (url[i] != '%')
		{
			decoded += url[i];
		}
		else if (i + 2 < url.length())
		{
			// Bug fix: strtol requires a NUL-terminated string; the previous
			// 2-byte buffer had no terminator, so strtol read past its end
			// (undefined behavior, potentially decoding garbage).
			char hex[3] = { url[i + 1], url[i + 2], '\0' };
			using namespace std;//Flash fix.
			decoded += static_cast<char>(strtol(hex, NULL, 16));
			i += 2; // skip the two consumed hex digits
		}
	}

	return decoded;
}
+#endif //ENABLE_WWW