/* Hook run whenever a cookie in the jar is added, removed or replaced.
 * Enforces the configured expiry policy on the incoming cookie before
 * chaining up to the parent class, holding the jar's file lock for the
 * whole operation. */
static void cookiejar_changed(SoupCookieJar *self, SoupCookie *old_cookie, SoupCookie *new_cookie)
{
    SoupDate *limit;

    FLOCK(COOKIEJAR(self)->lock, F_WRLCK);

    if (new_cookie) {
        /* session-expire-time handling: 0 forces a session cookie; a
         * positive value caps a persistent cookie's lifetime. */
        if (vb.config.cookie_expire_time == 0) {
            soup_cookie_set_expires(new_cookie, NULL);
        } else if (vb.config.cookie_expire_time > 0 && new_cookie->expires) {
            limit = soup_date_new_from_now(vb.config.cookie_expire_time);
            /* only shorten, never extend, the cookie's own expiry */
            if (soup_date_to_time_t(limit) < soup_date_to_time_t(new_cookie->expires)) {
                soup_cookie_set_expires(new_cookie, limit);
            }
            soup_date_free(limit);
        }

        /* session-cookie handling: give session cookies a finite
         * lifetime when cookie_timeout is configured */
        if (!new_cookie->expires && vb.config.cookie_timeout) {
            limit = soup_date_new_from_now(vb.config.cookie_timeout);
            soup_cookie_set_expires(new_cookie, limit);
            soup_date_free(limit);
        }
    }

    SOUP_COOKIE_JAR_CLASS(cookiejar_parent_class)->changed(self, old_cookie, new_cookie);

    FLOCK(COOKIEJAR(self)->lock, F_UNLCK);
}
/** * soup_date_new_from_string: * @date_string: the date in some plausible format * * Parses @date_string and tries to extract a date from it. This * recognizes all of the "HTTP-date" formats from RFC 2616, all ISO * 8601 formats containing both a time and a date, RFC 2822 dates, * and reasonable approximations thereof. (Eg, it is lenient about * whitespace, leading "0"s, etc.) * * Return value: a new #SoupDate, or %NULL if @date_string could not * be parsed. **/ SoupDate * soup_date_new_from_string (const char *date_string) { SoupDate *date; gboolean success; g_return_val_if_fail (date_string != NULL, NULL); date = g_slice_new (SoupDate); while (g_ascii_isspace (*date_string)) date_string++; /* If it starts with a digit, it's either an ISO 8601 date, or * an RFC2822 date without the optional weekday; in the later * case, there will be a month name later on, so look for one * of the month-start letters. */ if (g_ascii_isdigit (*date_string) && !strpbrk (date_string, "JFMASOND")) success = parse_iso8601_date (date, date_string); else success = parse_textual_date (date, date_string); if (!success) { g_slice_free (SoupDate, date); return NULL; } if (date->year < 1 || date->year > 9999 || date->month < 1 || date->month > 12 || date->day < 1 || date->day > days_in_month (date->month, date->year) || date->hour < 0 || date->hour > 24 || date->minute < 0 || date->minute > 59 || date->second < 0 || date->second > 61) { soup_date_free (date); return NULL; } if (date->hour == 24) { /* ISO8601 allows this explicitly. We allow it for * other types as well just for simplicity. */ if (date->minute == 0 && date->second == 0) soup_date_fixup (date); else { soup_date_free (date); return NULL; } } return date; }
/* Compares the server-reported "Date" response header against local
 * time and warns when they diverge by more than five minutes (a common
 * cause of OAuth signature failures).
 *
 * Fix: soup_date_free() was previously called even when
 * soup_date_new_from_string() returned NULL (unparsable header),
 * triggering its g_return_if_fail() warning; the free now lives inside
 * the NULL check. */
static void
sanity_check_date (RestProxyCall *call)
{
  GHashTable *headers;
  SoupDate *call_date;
  const char *s;
  time_t call_time, diff;

  headers = rest_proxy_call_get_response_headers (call);

  s = g_hash_table_lookup (headers, "Date");
  if (s) {
    call_date = soup_date_new_from_string (s);
    if (call_date) {
      call_time = soup_date_to_time_t (call_date);
      diff = labs (time (NULL) - call_time);
      /* More than five minutes difference between local time and the response time? */
      if (diff > (60 * 5)) {
        g_warning ("%ld seconds difference between HTTP time and system time!", diff);
      }
      soup_date_free (call_date);
    }
  }
  g_hash_table_unref (headers);
}
/* local_db_needs_update function returns TRUE on success and FALSE on failure. * It sets the parameter needs_update to TRUE if the local database needs * to be updated. */ static gboolean local_db_needs_update (SoupSession *session, const char *db_uri, GFile *db_local, gboolean *needs_update, GError **error) { GFileInfo *db_local_info; SoupMessage *msg; SoupDate *date; const gchar *db_time_str; guint64 db_time; guint64 db_local_time; guint status_code; if (g_file_query_exists (db_local, NULL) == FALSE) { *needs_update = TRUE; return TRUE; } msg = soup_message_new ("HEAD", db_uri); status_code = soup_session_send_message (session, msg); if (status_code != SOUP_STATUS_OK) { g_set_error_literal (error, SOUP_HTTP_ERROR, status_code, msg->reason_phrase); return FALSE; } db_time_str = soup_message_headers_get_one (msg->response_headers, "Last-Modified"); date = soup_date_new_from_string (db_time_str); db_time = (guint64) soup_date_to_time_t (date); soup_date_free (date); g_object_unref (msg); db_local_info = g_file_query_info (db_local, "time::modified", G_FILE_QUERY_INFO_NONE, NULL, error); if (!db_local_info) return FALSE; db_local_time = g_file_info_get_attribute_uint64 (db_local_info, "time::modified"); if (db_time <= db_local_time) *needs_update = FALSE; else *needs_update = TRUE; g_object_unref (db_local_info); return TRUE; }
/* Converts an HTTP full-date string (RFC 2616 "HTTP-date") to a Unix
 * timestamp. Returns 0 when @string cannot be parsed.
 *
 * Fix: soup_date_new_from_string() returns NULL on malformed input;
 * the result was previously passed unchecked to soup_date_to_time_t(),
 * dereferencing NULL. */
static gint64
utils_http_full_date_to_timestamp(const char* string)
{
	gint64 ret = 0;
	SoupDate* tmp;

	tmp = soup_date_new_from_string(string);
	if (tmp) {
		ret = soup_date_to_time_t(tmp);
		soup_date_free(tmp);
	}

	return ret;
}
/* Formats @time_ as an RFC 1123 "HTTP-date" string.
 * Caller owns the returned string and must g_free() it. */
gchar *
twitter_http_date_from_time_t (time_t time_)
{
  SoupDate *date = soup_date_new_from_time_t (time_);
  gchar *result = soup_date_to_string (date, SOUP_DATE_HTTP);

  soup_date_free (date);

  return result;
}
/* Asserts that @date parsed from @strdate equals the test's reference
 * instant (2004-11-06 08:09:07). Frees @date in every path, bumps the
 * global error counter on mismatch, and returns whether it matched. */
static gboolean
check_ok (const char *strdate, SoupDate *date)
{
	gboolean matches;

	debug_printf (2, "%s\n", strdate);

	matches = (date != NULL &&
		   date->year == 2004 && date->month == 11 && date->day == 6 &&
		   date->hour == 8 && date->minute == 9 && date->second == 7);
	if (matches) {
		soup_date_free (date);
		return TRUE;
	}

	debug_printf (1, " date parsing failed for '%s'.\n", strdate);
	if (date) {
		/* show what we actually got to ease debugging */
		debug_printf (1, " got: %d %d %d - %d %d %d\n\n",
			      date->year, date->month, date->day,
			      date->hour, date->minute, date->second);
		soup_date_free (date);
	}
	errors++;
	return FALSE;
}
// Converts a WebCore Cookie into a newly-allocated SoupCookie.
// Session cookies get no expiry; persistent ones take it from
// cookie.expires (milliseconds). Caller owns the returned cookie.
static SoupCookie* toSoupCookie(const Cookie& cookie)
{
    SoupCookie* result = soup_cookie_new(cookie.name.utf8().data(),
                                         cookie.value.utf8().data(),
                                         cookie.domain.utf8().data(),
                                         cookie.path.utf8().data(),
                                         -1);

    soup_cookie_set_http_only(result, cookie.httpOnly);
    soup_cookie_set_secure(result, cookie.secure);

    if (!cookie.session) {
        SoupDate* expiry = msToSoupDate(cookie.expires);
        soup_cookie_set_expires(result, expiry);
        soup_date_free(expiry);
    }

    return result;
}
/*
 * Send a discovery response
 *
 * GSource callback: builds and sends one SSDP discovery response for the
 * resource described by @user_data (a DiscoveryResponse), then frees the
 * response. Always returns FALSE so the timeout source fires only once.
 */
static gboolean
discovery_response_timeout (gpointer user_data)
{
        DiscoveryResponse *response = user_data;
        GSSDPClient *client;
        SoupDate *date;
        char *al, *date_str, *message;
        guint max_age;
        char *usn;
        GSSDPResourceGroup *self = response->resource->resource_group;
        GSSDPResourceGroupPrivate *priv;

        priv = gssdp_resource_group_get_instance_private (self);

        /* Send message */
        client = priv->client;
        max_age = priv->max_age;

        /* AL (alternate locations) and USN header values; construct_al()
         * may return NULL, handled below. */
        al = construct_al (response->resource);
        usn = construct_usn (response->resource->usn,
                             response->target,
                             response->resource->target);

        /* Current time formatted as an HTTP-date for the DATE header. */
        date = soup_date_new_from_now (0);
        date_str = soup_date_to_string (date, SOUP_DATE_HTTP);
        soup_date_free (date);

        message = g_strdup_printf (SSDP_DISCOVERY_RESPONSE,
                                   (char *) response->resource->locations->data,
                                   al ? al : "",
                                   usn,
                                   gssdp_client_get_server_id (client),
                                   max_age,
                                   response->target,
                                   date_str);

        _gssdp_client_send_message (client,
                                    response->dest_ip,
                                    response->dest_port,
                                    message,
                                    _GSSDP_DISCOVERY_RESPONSE);

        g_free (message);
        g_free (date_str);
        g_free (al);
        g_free (usn);

        /* One-shot: free the pending response and remove the source. */
        discovery_response_free (response);

        return FALSE;
}
/* Asserts that @strdate was rejected by the parser: a non-NULL @date
 * means parsing unexpectedly succeeded, which counts as a test error.
 * Frees @date when present. */
static void
check_bad (const char *strdate, SoupDate *date)
{
	debug_printf (2, "%s\n", strdate);

	if (date) {
		errors++;
		debug_printf (1, " date parsing succeeded for '%s'!\n", strdate);
		debug_printf (1, " got: %d %d %d - %d %d %d\n\n",
			      date->year, date->month, date->day,
			      date->hour, date->minute, date->second);
		soup_date_free (date);
	}
}
/* XML-RPC test handler "dateChange": takes a datetime plus a struct of
 * tm_* integer overrides, applies the overrides to the date, and
 * responds with the adjusted datetime.
 *
 * Fix: the second argument of g_variant_get() had been corrupted to
 * "×tamp" (a mangled "&timestamp"), which does not compile;
 * restored the address-of expression. */
static void
do_dateChange (SoupMessage *msg, SoupXMLRPCParams *params)
{
	GVariant *args;
	GVariant *timestamp;
	SoupDate *date;
	GVariant *arg;
	int val;
	GError *error = NULL;

	if (!(args = parse_params (msg, params, "(va{si})")))
		return;

	g_variant_get (args, "(v@a{si})", &timestamp, &arg);

	date = soup_xmlrpc_variant_get_datetime (timestamp, &error);
	if (!date) {
		soup_xmlrpc_message_set_fault (msg,
					       SOUP_XMLRPC_FAULT_SERVER_ERROR_INVALID_METHOD_PARAMETERS,
					       "%s", error->message);
		g_clear_error (&error);
		goto fail;
	}

	/* Apply whichever struct-tm style fields the caller supplied;
	 * tm_year is years-since-1900 and tm_mon is 0-based, hence the
	 * offsets. */
	if (g_variant_lookup (arg, "tm_year", "i", &val))
		date->year = val + 1900;
	if (g_variant_lookup (arg, "tm_mon", "i", &val))
		date->month = val + 1;
	if (g_variant_lookup (arg, "tm_mday", "i", &val))
		date->day = val;
	if (g_variant_lookup (arg, "tm_hour", "i", &val))
		date->hour = val;
	if (g_variant_lookup (arg, "tm_min", "i", &val))
		date->minute = val;
	if (g_variant_lookup (arg, "tm_sec", "i", &val))
		date->second = val;

	soup_xmlrpc_message_set_response (msg,
					  soup_xmlrpc_variant_new_datetime (date),
					  NULL);

	soup_date_free (date);

fail:
	g_variant_unref (args);
	g_variant_unref (arg);
	g_variant_unref (timestamp);
}
/* Parses an HTTP date string into a time_t.
 * Returns (time_t) -1 when @date is NULL or cannot be parsed. */
time_t
twitter_http_date_to_time_t (const gchar *date)
{
	SoupDate *parsed;
	time_t result = (time_t) -1;

	g_return_val_if_fail (date != NULL, (time_t) -1);

	parsed = soup_date_new_from_string (date);
	if (parsed != NULL) {
		result = soup_date_to_time_t (parsed);
		soup_date_free (parsed);
	}

	return result;
}
/* SoupSession callback invoked when a feed download finishes. Copies the
 * HTTP outcome (status, payload, content type, Last-Modified and ETag
 * cache validators) into the update job's result and hands the job back
 * to the update machinery. */
static void
network_process_callback (SoupSession *session, SoupMessage *msg, gpointer user_data)
{
	updateJobPtr job = (updateJobPtr)user_data;
	SoupDate *last_modified;
	const gchar *tmp = NULL;

	job->result->source = soup_uri_to_string (soup_message_get_uri(msg), FALSE);

	/* Transport-level failures (DNS, connect, ...) go into returncode;
	 * real HTTP responses go into httpstatus. Exactly one is non-zero. */
	if (SOUP_STATUS_IS_TRANSPORT_ERROR (msg->status_code)) {
		job->result->returncode = msg->status_code;
		job->result->httpstatus = 0;
	} else {
		job->result->httpstatus = msg->status_code;
		job->result->returncode = 0;
	}

	debug1 (DEBUG_NET, "download status code: %d", msg->status_code);
	debug1 (DEBUG_NET, "source after download: >>>%s<<<", job->result->source);

	/* length+1: copy the NUL terminator libsoup keeps after the body,
	 * so the buffer can be used as a C string; size stays the raw
	 * body length. */
	job->result->data = g_memdup (msg->response_body->data, msg->response_body->length+1);
	job->result->size = (size_t)msg->response_body->length;
	debug1 (DEBUG_NET, "%d bytes downloaded", job->result->size);

	job->result->contentType = g_strdup (soup_message_headers_get_content_type (msg->response_headers, NULL));

	/* Update last-modified date */
	tmp = soup_message_headers_get_one (msg->response_headers, "Last-Modified");
	if (tmp) {
		/* The string may be badly formatted, which will make
		 * soup_date_new_from_string() return NULL */
		last_modified = soup_date_new_from_string (tmp);
		if (last_modified) {
			job->result->updateState->lastModified = soup_date_to_time_t (last_modified);
			soup_date_free (last_modified);
		}
	}

	/* Update ETag value */
	tmp = soup_message_headers_get_one (msg->response_headers, "ETag");
	if (tmp) {
		job->result->updateState->etag = g_strdup(tmp);
	}

	update_process_finished_job (job);
}
/* Converts a Twitter-style date string into a GTimeVal (seconds only).
 * First tries any libsoup-supported format; falls back to Twitter's
 * "%a %b %d %T %z %Y" layout via strptime() where available.
 * Returns TRUE on success, FALSE if the string could not be parsed.
 *
 * Fixes: `struct tm tmp` was read uninitialized when strptime() matched
 * only part of the format (strptime only sets converted fields), the
 * strptime() return value was never checked, and mktime()'s error value
 * (-1) passed the old `res != 0` test and was reported as success. */
gboolean
twitter_date_to_time_val (const gchar *date, GTimeVal *time_)
{
	time_t res;
	SoupDate *soup_date;

	g_return_val_if_fail (date != NULL, FALSE);
	g_return_val_if_fail (time_ != NULL, FALSE);

	soup_date = soup_date_new_from_string (date);
	if (soup_date) {
		res = soup_date_to_time_t (soup_date);
		soup_date_free (soup_date);
		time_->tv_sec = res;
		time_->tv_usec = 0;
		return TRUE;
	}

#ifdef HAVE_STRPTIME
	{
		/* zero-init: strptime() only fills fields it converts */
		struct tm tmp = { 0 };

		/* OMFG, what are they? insane? */
		if (strptime (date, "%a %b %d %T %z %Y", &tmp) != NULL) {
			res = mktime (&tmp);
			if (res != (time_t) -1 && res != 0) {
				time_->tv_sec = res;
				time_->tv_usec = 0;
				return TRUE;
			}
		}
	}
#endif /* HAVE_STRPTIME */

	return FALSE;
}
/* Reads the current element's text content and parses it as a date.
 * Returns the Unix timestamp, or 0 when the element is empty or the
 * date cannot be parsed.
 *
 * Fix: soup_date_new_from_string() returns NULL on malformed input;
 * it was previously passed unchecked to soup_date_to_time_t(),
 * dereferencing NULL on bad feed data. */
gint64
feed_get_element_date (FeedParser* fparser)
{
    time_t date;
    gchar* content;

    date = 0;
    content = feed_get_element_string (fparser);
    if (content)
    {
        SoupDate* sdate;

        sdate = soup_date_new_from_string (content);
        if (sdate)
        {
            date = soup_date_to_time_t (sdate);
            soup_date_free (sdate);
        }
        g_free (content);
    }
    return ((gint64)date);
}
/* XML-RPC round-trip test for the "dateChange" method: sends a random
 * date plus a struct containing a random subset of tm_* overrides,
 * applies the same overrides to the local copy, and checks that the
 * server's returned date matches field for field.
 * Returns TRUE on success; frees all intermediate values. */
static gboolean
test_dateChange (void)
{
	GHashTable *structval;
	SoupDate *date, *result;
	char *timestamp;
	GValue retval;
	gboolean ok;

	debug_printf (1, "dateChange (date, struct of ints -> time): ");

	/* Random but valid starting date (day capped at 28 so every
	 * month works). */
	date = soup_date_new (1970 + (rand () % 50), 1 + rand () % 12, 1 + rand () % 28, rand () % 24, rand () % 60, rand () % 60);
	if (debug_level >= 2) {
		timestamp = soup_date_to_string (date, SOUP_DATE_ISO8601_XMLRPC);
		debug_printf (2, "date: %s, {", timestamp);
		g_free (timestamp);
	}

	structval = soup_value_hash_new ();

	/* Each field is overridden with probability 2/3; the local copy
	 * is mutated in step so it is the expected server result.
	 * Note struct-tm conventions: tm_year is years since 1900,
	 * tm_mon is 0-based. */
	if (rand () % 3) {
		date->year = 1970 + (rand () % 50);
		debug_printf (2, "tm_year: %d, ", date->year - 1900);
		soup_value_hash_insert (structval, "tm_year", G_TYPE_INT, date->year - 1900);
	}
	if (rand () % 3) {
		date->month = 1 + rand () % 12;
		debug_printf (2, "tm_mon: %d, ", date->month - 1);
		soup_value_hash_insert (structval, "tm_mon", G_TYPE_INT, date->month - 1);
	}
	if (rand () % 3) {
		date->day = 1 + rand () % 28;
		debug_printf (2, "tm_mday: %d, ", date->day);
		soup_value_hash_insert (structval, "tm_mday", G_TYPE_INT, date->day);
	}
	if (rand () % 3) {
		date->hour = rand () % 24;
		debug_printf (2, "tm_hour: %d, ", date->hour);
		soup_value_hash_insert (structval, "tm_hour", G_TYPE_INT, date->hour);
	}
	if (rand () % 3) {
		date->minute = rand () % 60;
		debug_printf (2, "tm_min: %d, ", date->minute);
		soup_value_hash_insert (structval, "tm_min", G_TYPE_INT, date->minute);
	}
	if (rand () % 3) {
		date->second = rand () % 60;
		debug_printf (2, "tm_sec: %d, ", date->second);
		soup_value_hash_insert (structval, "tm_sec", G_TYPE_INT, date->second);
	}
	debug_printf (2, "} -> ");

	/* Issue the call and extract the returned date. */
	ok = (do_xmlrpc ("dateChange", &retval, SOUP_TYPE_DATE, date, G_TYPE_HASH_TABLE, structval, G_TYPE_INVALID) && check_xmlrpc (&retval, SOUP_TYPE_DATE, &result));
	g_hash_table_destroy (structval);
	if (!ok) {
		soup_date_free (date);
		return FALSE;
	}

	if (debug_level >= 2) {
		timestamp = soup_date_to_string (result, SOUP_DATE_ISO8601_XMLRPC);
		debug_printf (2, "%s: ", timestamp);
		g_free (timestamp);
	}

	/* Field-by-field comparison of expected vs. returned date. */
	ok = ((date->year == result->year) && (date->month == result->month) && (date->day == result->day) && (date->hour == result->hour) && (date->minute == result->minute) && (date->second == result->second));
	soup_date_free (date);
	soup_date_free (result);

	debug_printf (1, "%s\n", ok ? "OK!" : "WRONG!");
	return ok;
}
/* Test HTTP server handler. Echoes cache-related response headers that
 * the client requested via Test-Set-* request headers, honors the
 * If-Modified-Since / If-None-Match conditionals against them, and
 * otherwise serves a deterministic text body derived from the path and
 * the validators (SHA-256 hex digest). */
static void
server_callback (SoupServer *server, SoupMessage *msg, const char *path, GHashTable *query, SoupClientContext *context, gpointer data)
{
	const char *last_modified, *etag;
	const char *header;
	guint status = SOUP_STATUS_OK;

	/* Only GET and POST are supported by this test server. */
	if (msg->method != SOUP_METHOD_GET && msg->method != SOUP_METHOD_POST) {
		soup_message_set_status (msg, SOUP_STATUS_NOT_IMPLEMENTED);
		return;
	}

	/* Reflect client-requested caching headers into the response. */
	header = soup_message_headers_get_one (msg->request_headers, "Test-Set-Expires");
	if (header) {
		soup_message_headers_append (msg->response_headers, "Expires", header);
	}

	header = soup_message_headers_get_one (msg->request_headers, "Test-Set-Cache-Control");
	if (header) {
		soup_message_headers_append (msg->response_headers, "Cache-Control", header);
	}

	last_modified = soup_message_headers_get_one (msg->request_headers, "Test-Set-Last-Modified");
	if (last_modified) {
		soup_message_headers_append (msg->response_headers, "Last-Modified", last_modified);
	}

	etag = soup_message_headers_get_one (msg->request_headers, "Test-Set-ETag");
	if (etag) {
		soup_message_headers_append (msg->response_headers, "ETag", etag);
	}

	/* Conditional GET via timestamps: 304 when our Last-Modified is
	 * not newer than the client's If-Modified-Since. */
	header = soup_message_headers_get_one (msg->request_headers, "If-Modified-Since");
	if (header && last_modified) {
		SoupDate *date;
		time_t lastmod, check;

		date = soup_date_new_from_string (last_modified);
		lastmod = soup_date_to_time_t (date);
		soup_date_free (date);

		date = soup_date_new_from_string (header);
		check = soup_date_to_time_t (date);
		soup_date_free (date);

		if (lastmod <= check)
			status = SOUP_STATUS_NOT_MODIFIED;
	}

	/* Conditional GET via entity tags. */
	header = soup_message_headers_get_one (msg->request_headers, "If-None-Match");
	if (header && etag) {
		if (!strcmp (header, etag))
			status = SOUP_STATUS_NOT_MODIFIED;
	}

	/* Body content is a digest of path + validators, so any change in
	 * the validators yields a different (but reproducible) body. */
	if (status == SOUP_STATUS_OK) {
		GChecksum *sum;
		const char *body;

		sum = g_checksum_new (G_CHECKSUM_SHA256);
		g_checksum_update (sum, (guchar *)path, strlen (path));
		if (last_modified)
			g_checksum_update (sum, (guchar *)last_modified, strlen (last_modified));
		if (etag)
			g_checksum_update (sum, (guchar *)etag, strlen (etag));
		body = g_checksum_get_string (sum);
		soup_message_set_response (msg, "text/plain",
					   SOUP_MEMORY_COPY,
					   body, strlen (body) + 1);
		g_checksum_free (sum);
	}
	soup_message_set_status (msg, status);
}
/* Downloads a feed specified in the request structure, returns
   the downloaded data or NULL in the request structure.
   If the webserver reports a permanent redirection, the feed
   url will be modified and the old URL will be freed. The
   request structure will also contain the HTTP status and the
   last modified string. */
void
network_process_request (const updateJobPtr const job)
{
	SoupMessage *msg;
	SoupDate *date;

	g_assert (NULL != job->request);
	debug1 (DEBUG_NET, "downloading %s", job->request->source);

	/* Prepare the SoupMessage */
	msg = soup_message_new (job->request->postdata ? SOUP_METHOD_POST : SOUP_METHOD_GET, job->request->source);
	if (!msg) {
		g_warning ("The request for %s could not be parsed!", job->request->source);
		return;
	}

	/* Set the postdata for the request */
	if (job->request->postdata) {
		soup_message_set_request (msg,
					  "application/x-www-form-urlencoded",
					  SOUP_MEMORY_STATIC, /* libsoup won't free the postdata */
					  job->request->postdata,
					  strlen (job->request->postdata));
	}

	/* Set the If-Modified-Since: header so the server can reply
	 * 304 Not Modified for unchanged feeds. */
	if (job->request->updateState && job->request->updateState->lastModified) {
		gchar *datestr;

		date = soup_date_new_from_time_t (job->request->updateState->lastModified);
		datestr = soup_date_to_string (date, SOUP_DATE_HTTP);
		soup_message_headers_append (msg->request_headers,
					     "If-Modified-Since",
					     datestr);
		g_free (datestr);
		soup_date_free (date);
	}

	/* Set the authentication: either explicit credentials embedded
	 * into the URI, or a pre-built Authorization header value. */
	if (!job->request->authValue &&
	    job->request->options &&
	    job->request->options->username &&
	    job->request->options->password) {
		SoupURI *uri = soup_message_get_uri (msg);

		soup_uri_set_user (uri, job->request->options->username);
		soup_uri_set_password (uri, job->request->options->password);
	}

	if (job->request->authValue) {
		soup_message_headers_append (msg->request_headers, "Authorization",
					     job->request->authValue);
	}

	/* Add requested cookies; the session cookie jar is disabled so
	 * only these explicit cookies are sent. */
	if (job->request->updateState && job->request->updateState->cookies) {
		soup_message_headers_append (msg->request_headers, "Cookie",
					     job->request->updateState->cookies);
		soup_message_disable_feature (msg, SOUP_TYPE_COOKIE_JAR);
	}

	/* TODO: Right now we send the msg, and if it requires authentication and
	 * we didn't provide one, the petition fails and when the job is processed
	 * it sees it needs authentication and displays a dialog, and if credentials
	 * are entered, it queues a new job with auth credentials. Instead of that,
	 * we should probably handle authentication directly here, connecting the
	 * msg to a callback in case of 401 (see soup_message_add_status_code_handler())
	 * displaying the dialog ourselves, and requeing the msg if we get credentials */

	/* If the feed has "dont use a proxy" selected, disable the proxy for the msg */
	if ((job->request->options && job->request->options->dontUseProxy) ||
	    (network_get_proxy_host () == NULL))
		soup_message_disable_feature (msg, SOUP_TYPE_PROXY_URI_RESOLVER);

	soup_session_queue_message (session, msg, network_process_callback, job);
}
/* Builds a SoupCookie from a Lua table at stack index @idx.
 * Expected fields: name/value/domain/path (strings), secure/http_only
 * (booleans, nil treated as false), expires (number, Unix time;
 * values > 0 set a real expiry date).
 * On any type mismatch, sets *error to a newly-allocated message and
 * returns NULL (caller owns *error). Returns a new SoupCookie the
 * caller must free with soup_cookie_free(). */
static SoupCookie*
cookie_new_from_table(lua_State *L, gint idx, gchar **error)
{
    SoupCookie *cookie = NULL;
    SoupDate *date;
    const gchar *name, *value, *domain, *path;
    name = value = domain = path = NULL;
    gboolean secure, http_only;
    gint expires;

    /* correct relative index */
    if (idx < 0)
        idx = lua_gettop(L) + idx + 1;

    /* check for cookie table */
    if (!lua_istable(L, idx)) {
        *error = g_strdup_printf("invalid cookie table, got %s",
                lua_typename(L, lua_type(L, idx)));
        return NULL;
    }

/* Type predicates evaluated against the value on top of the stack.
 * Numbers are accepted as strings (Lua coercion); nil is accepted as
 * a false boolean. */
#define IS_STRING  (lua_isstring(L, -1) || lua_isnumber(L, -1))
#define IS_BOOLEAN (lua_isboolean(L, -1) || lua_isnil(L, -1))
#define IS_NUMBER  (lua_isnumber(L, -1))

/* Fetches cookie.<prop> from the table into the local variable of the
 * same name, or sets *error and bails out on a type mismatch.
 * NOTE(review): on the error path the fetched value is left on the Lua
 * stack — appears benign (the caller's stack frame unwinds), verify
 * against call sites. */
#define GET_PROP(prop, typname, typexpr, typfunc) \
    lua_pushliteral(L, #prop); \
    lua_rawget(L, idx); \
    if ((typexpr)) { \
        prop = typfunc(L, -1); \
        lua_pop(L, 1); \
    } else { \
        *error = g_strdup_printf("invalid cookie." #prop " type, expected " \
            #typname ", got %s", lua_typename(L, lua_type(L, -1))); \
        return NULL; \
    }

    /* get cookie properties */
    GET_PROP(name, string, IS_STRING, lua_tostring)
    GET_PROP(value, string, IS_STRING, lua_tostring)
    GET_PROP(domain, string, IS_STRING, lua_tostring)
    GET_PROP(path, string, IS_STRING, lua_tostring)
    GET_PROP(secure, boolean, IS_BOOLEAN, lua_toboolean)
    GET_PROP(http_only, boolean, IS_BOOLEAN, lua_toboolean)
    GET_PROP(expires, number, IS_NUMBER, lua_tonumber)

#undef IS_STRING
#undef IS_BOOLEAN
#undef IS_NUMBER
#undef GET_PROP

    /* create soup cookie */
    if ((cookie = soup_cookie_new(name, value, domain, path, expires))) {
        soup_cookie_set_secure(cookie, secure);
        soup_cookie_set_http_only(cookie, http_only);

        /* set real expiry date from unixtime */
        if (expires > 0) {
            date = soup_date_new_from_time_t((time_t) expires);
            soup_cookie_set_expires(cookie, date);
            soup_date_free(date);
        }
        return cookie;
    }

    /* soup cookie creation failed */
    *error = g_strdup_printf("soup cookie creation failed");
    return NULL;
}
static void check_conversion (const struct conversion *conv) { SoupDate *date; char *str; debug_printf (2, "%s\n", conv->source); date = make_date (conv->source); if (!date) { debug_printf (1, " date parsing failed for '%s'.\n", conv->source); errors++; return; } str = soup_date_to_string (date, SOUP_DATE_HTTP); if (!str || strcmp (str, conv->http) != 0) { debug_printf (1, " conversion of '%s' to HTTP failed:\n" " wanted: %s\n got: %s\n", conv->source, conv->http, str ? str : "(null)"); errors++; } g_free (str); str = soup_date_to_string (date, SOUP_DATE_COOKIE); if (!str || strcmp (str, conv->cookie) != 0) { debug_printf (1, " conversion of '%s' to COOKIE failed:\n" " wanted: %s\n got: %s\n", conv->source, conv->cookie, str ? str : "(null)"); errors++; } g_free (str); str = soup_date_to_string (date, SOUP_DATE_RFC2822); if (!str || strcmp (str, conv->rfc2822) != 0) { debug_printf (1, " conversion of '%s' to RFC2822 failed:\n" " wanted: %s\n got: %s\n", conv->source, conv->rfc2822, str ? str : "(null)"); errors++; } g_free (str); str = soup_date_to_string (date, SOUP_DATE_ISO8601_COMPACT); if (!str || strcmp (str, conv->compact) != 0) { debug_printf (1, " conversion of '%s' to COMPACT failed:\n" " wanted: %s\n got: %s\n", conv->source, conv->compact, str ? str : "(null)"); errors++; } g_free (str); str = soup_date_to_string (date, SOUP_DATE_ISO8601_FULL); if (!str || strcmp (str, conv->full) != 0) { debug_printf (1, " conversion of '%s' to FULL failed:\n" " wanted: %s\n got: %s\n", conv->source, conv->full, str ? str : "(null)"); errors++; } g_free (str); str = soup_date_to_string (date, SOUP_DATE_ISO8601_XMLRPC); if (!str || strcmp (str, conv->xmlrpc) != 0) { debug_printf (1, " conversion of '%s' to XMLRPC failed:\n" " wanted: %s\n got: %s\n", conv->source, conv->xmlrpc, str ? str : "(null)"); errors++; } g_free (str); soup_date_free (date); }
/* "got-headers" handler run as soon as a request's headers arrive:
 * normalizes the request path (unless raw_paths), stamps the mandatory
 * Date response header, and performs authentication so that
 * "Expect: 100-continue" requests can be rejected before their body is
 * transferred. */
static void
got_headers (SoupMessage *req, SoupClientContext *client)
{
	SoupServer *server = client->server;
	SoupServerPrivate *priv = SOUP_SERVER_GET_PRIVATE (server);
	SoupURI *uri;
	SoupDate *date;
	char *date_string;
	SoupAuthDomain *domain;
	GSList *iter;
	gboolean rejected = FALSE;
	char *auth_user;

	/* Decode %-escapes in the path unless the server asked for raw paths. */
	if (!priv->raw_paths) {
		char *decoded_path;

		uri = soup_message_get_uri (req);
		decoded_path = soup_uri_decode (uri->path);
		soup_uri_set_path (uri, decoded_path);
		g_free (decoded_path);
	}

	/* Add required response headers */
	date = soup_date_new_from_now (0);
	date_string = soup_date_to_string (date, SOUP_DATE_HTTP);
	soup_message_headers_replace (req->response_headers, "Date",
				      date_string);
	g_free (date_string);
	soup_date_free (date);

	/* Now handle authentication. (We do this here so that if
	 * the request uses "Expect: 100-continue", we can reject it
	 * immediately rather than waiting for the request body to
	 * be sent.
	 */
	for (iter = priv->auth_domains; iter; iter = iter->next) {
		domain = iter->data;

		if (soup_auth_domain_covers (domain, req)) {
			auth_user = soup_auth_domain_accepts (domain, req);
			if (auth_user) {
				/* Accepted: record domain + user and stop. */
				client->auth_domain = g_object_ref (domain);
				client->auth_user = auth_user;
				return;
			}

			rejected = TRUE;
		}
	}

	/* If no auth domain rejected it, then it's ok. */
	if (!rejected)
		return;

	/* Rejected: let every covering domain issue its challenge. */
	for (iter = priv->auth_domains; iter; iter = iter->next) {
		domain = iter->data;
		if (soup_auth_domain_covers (domain, req))
			soup_auth_domain_challenge (domain, req);
	}
}
/* Handles an SSDP alive/response announcement: extracts the USN and
 * location URIs from @headers, creates or refreshes the cache entry for
 * the resource, computes its expiry timeout from Cache-Control/Expires,
 * and emits RESOURCE_AVAILABLE for newly seen resources.
 *
 * Fixes: (1) the Cache-Control header list was freed from the loop
 * cursor, leaking the list head (or the whole list when no "max-age"
 * entry matched) — the head pointer is now kept and freed; (2)
 * soup_date_new_from_string() can return NULL for an unparsable
 * Expires header, which was previously dereferenced — now treated as
 * an invalid header (default max-age). */
static void
resource_available (GSSDPResourceBrowser *resource_browser,
                    SoupMessageHeaders   *headers)
{
        GSSDPResourceBrowserPrivate *priv;
        const char *usn;
        const char *header;
        Resource *resource;
        gboolean was_cached;
        guint timeout;
        GList *locations;
        gboolean destroyLocations;
        GList *it1, *it2;
        char *canonical_usn;

        priv = gssdp_resource_browser_get_instance_private (resource_browser);
        usn = soup_message_headers_get_one (headers, "USN");
        if (!usn)
                return; /* No USN specified */

        /* Build list of locations */
        locations = NULL;
        destroyLocations = TRUE;

        header = soup_message_headers_get_one (headers, "Location");
        if (header)
                locations = g_list_append (locations, g_strdup (header));

        header = soup_message_headers_get_one (headers, "AL");
        if (header) {
                /* Parse AL header. The format is: <uri1><uri2>... */
                const char *start, *end;
                char *uri;

                start = header;
                while ((start = strchr (start, '<'))) {
                        start += 1;
                        if (!start || !*start)
                                break;

                        end = strchr (start, '>');
                        if (!end || !*end)
                                break;

                        uri = g_strndup (start, end - start);
                        locations = g_list_append (locations, uri);

                        start = end;
                }
        }

        if (!locations)
                return; /* No location specified */

        /* Versioned targets share a cache key without the trailing
         * ":version" suffix. */
        if (priv->version > 0) {
                char *version;

                version = g_strrstr (usn, ":");
                canonical_usn = g_strndup (usn, version - usn);
        } else {
                canonical_usn = g_strdup (usn);
        }

        /* Get from cache, if possible */
        resource = g_hash_table_lookup (priv->resources, canonical_usn);

        /* Put usn into fresh resources, so this resource will not be
         * removed on cache refreshing. */
        if (priv->fresh_resources != NULL) {
                g_hash_table_add (priv->fresh_resources,
                                  g_strdup (canonical_usn));
        }

        /* If location does not match, expect that we missed bye bye packet */
        if (resource) {
                for (it1 = locations, it2 = resource->locations;
                     it1 && it2;
                     it1 = it1->next, it2 = it2->next) {
                        if (strcmp ((const char *) it1->data,
                                    (const char *) it2->data) != 0) {
                                resource_unavailable (resource_browser, headers);
                                /* Will be destroyed by resource_unavailable */
                                resource = NULL;
                                break;
                        }
                }
        }

        if (resource) {
                /* Remove old timeout */
                g_source_destroy (resource->timeout_src);
                was_cached = TRUE;
        } else {
                /* Create new Resource data structure */
                resource = g_slice_new (Resource);
                resource->resource_browser = resource_browser;
                resource->usn = g_strdup (usn);
                resource->locations = locations;
                destroyLocations = FALSE; /* Ownership passed to resource */

                g_hash_table_insert (priv->resources,
                                     canonical_usn,
                                     resource);
                was_cached = FALSE;

                /* hash-table takes ownership of this */
                canonical_usn = NULL;
        }

        g_free (canonical_usn);

        /* Calculate new timeout */
        header = soup_message_headers_get_one (headers, "Cache-Control");
        if (header) {
                GSList *list, *cursor;
                int res;

                res = 0;
                /* Keep the head so the entire parsed list is freed. */
                list = soup_header_parse_list (header);
                for (cursor = list; cursor; cursor = cursor->next) {
                        res = sscanf (cursor->data,
                                      "max-age = %d",
                                      &timeout);
                        if (res == 1)
                                break;
                }

                if (res != 1) {
                        g_warning ("Invalid 'Cache-Control' header. Assuming "
                                   "default max-age of %d.\n"
                                   "Header was:\n%s",
                                   SSDP_DEFAULT_MAX_AGE,
                                   header);

                        timeout = SSDP_DEFAULT_MAX_AGE;
                }

                soup_header_free_list (list);
        } else {
                const char *expires;

                expires = soup_message_headers_get_one (headers, "Expires");
                if (expires) {
                        SoupDate *soup_exp_time;
                        time_t exp_time, cur_time;

                        /* Unparsable dates yield NULL; fold that into the
                         * "invalid header" path below. */
                        soup_exp_time = soup_date_new_from_string (expires);
                        if (soup_exp_time) {
                                exp_time = soup_date_to_time_t (soup_exp_time);
                                soup_date_free (soup_exp_time);
                        } else {
                                exp_time = 0;
                        }
                        cur_time = time (NULL);

                        if (exp_time > cur_time)
                                timeout = exp_time - cur_time;
                        else {
                                g_warning ("Invalid 'Expires' header. Assuming "
                                           "default max-age of %d.\n"
                                           "Header was:\n%s",
                                           SSDP_DEFAULT_MAX_AGE,
                                           expires);

                                timeout = SSDP_DEFAULT_MAX_AGE;
                        }
                } else {
                        g_warning ("No 'Cache-Control' nor any 'Expires' "
                                   "header was specified. Assuming default "
                                   "max-age of %d.",
                                   SSDP_DEFAULT_MAX_AGE);

                        timeout = SSDP_DEFAULT_MAX_AGE;
                }
        }

        resource->timeout_src = g_timeout_source_new_seconds (timeout);
        g_source_set_callback (resource->timeout_src,
                               resource_expire,
                               resource,
                               NULL);
        g_source_attach (resource->timeout_src,
                         g_main_context_get_thread_default ());
        g_source_unref (resource->timeout_src);

        /* Only continue with signal emission if this resource was not
         * cached already */
        if (!was_cached) {
                /* Emit signal */
                g_signal_emit (resource_browser,
                               signals[RESOURCE_AVAILABLE],
                               0,
                               usn,
                               locations);
        }

        /* Cleanup */
        if (destroyLocations)
                g_list_free_full (locations, g_free);
}
/* Destroys one HSTS cache entry: releases its expiry date, then the
 * slice-allocated entry itself. */
static void
free_entry(HSTSEntry *entry)
{
    soup_date_free(entry->expires_at);
    g_slice_free(HSTSEntry, entry);
}
/**
 * twitter_date_to_time_val:
 * @date: a timestamp coming from Twitter
 * @time_: return location for a #GTimeVal
 *
 * Converts a Twitter date into a #GTimeVal. The timestamp is relative
 * to UTC.
 *
 * Return value: %TRUE if the conversion was successful
 *
 * Fixes: `struct tm tmp` was read while partially uninitialized
 * (strptime() only sets the fields it converts) and the strptime()
 * return value was never checked, so unparsable input produced a
 * garbage timestamp instead of %FALSE.
 */
gboolean
twitter_date_to_time_val (const gchar *date, GTimeVal *time_)
{
  time_t res;
  SoupDate *soup_date;

  g_return_val_if_fail (date != NULL, FALSE);
  g_return_val_if_fail (time_ != NULL, FALSE);

  /* XXX - this code is here in case there's a sudden onset of sanity
   * at Twitter and they switch to using any format supported by libsoup
   */
  soup_date = soup_date_new_from_string (date);
  if (soup_date)
    {
      res = soup_date_to_time_t (soup_date);
      soup_date_free (soup_date);

      time_->tv_sec = res;
      time_->tv_usec = 0;

      return TRUE;
    }

#ifdef HAVE_STRPTIME
  {
    /* zero-init: strptime() leaves unmatched fields untouched */
    struct tm tmp = { 0 };

    /* OMFG, ctime()? really? what are they? insane? I swear, this is
     * what happens when you let ruby developers write public APIs
     *
     * what happened to ISO8601 and web datestamps? you work on the web,
     * people!
     */
    if (strptime (date, "%a %b %d %T %z %Y", &tmp) == NULL)
      return FALSE;

#ifdef HAVE_TIMEGM
    time_->tv_sec = timegm (&tmp);
    time_->tv_usec = 0;

    return TRUE;
#else
    {
      /* Manual UTC conversion for platforms without timegm(). */
      res = 0;

      if (tmp.tm_mon < 0 || tmp.tm_mon > 11)
        {
          time_->tv_sec = res;
          time_->tv_usec = 0;

          return FALSE;
        }

      res += (tmp.tm_year - 70) * 365;
      res += (tmp.tm_year - 68) / 4;      /* leap days since 1970 */
      res += days_before[tmp.tm_mon] + tmp.tm_mday - 1;

      if (tmp.tm_year % 4 == 0 && tmp.tm_mon < 2)
        res -= 1;

      res = ((((res * 24) + tmp.tm_hour) * 60) + tmp.tm_min) * 60 + tmp.tm_sec;

      time_->tv_sec = res;
      time_->tv_usec = 0;

      return TRUE;
    }
#endif /* HAVE_TIMEGM */
  }
#endif /* HAVE_STRPTIME */

  return FALSE;
}
/* Populate a GFileInfo from an HTTP response's headers: display/edit
 * name (Content-Disposition filename or URI basename), size
 * (Content-Range / Content-Length), content type and icons,
 * modification time (Last-Modified) and ETag.
 *
 * Fix: the address-of in the Content-Disposition call had been
 * corrupted to the mojibake "&para;ms" — restored to "&params". */
static void
file_info_from_message (SoupMessage *msg,
                        GFileInfo *info,
                        GFileAttributeMatcher *matcher)
{
  const char *text;
  GHashTable *params;
  char *basename;
  char *ed_name;

  basename = ed_name = NULL;

  /* Prefer the filename from the Content-Disposition (rfc2183) header
     if one is present. See bug 551298. */
  if (soup_message_headers_get_content_disposition (msg->response_headers,
                                                    NULL, &params))
    {
      const char *name = g_hash_table_lookup (params, "filename");

      if (name)
        basename = g_strdup (name);

      g_hash_table_destroy (params);
    }

  /* Fall back to the last path segment of the request URI. */
  if (basename == NULL)
    {
      const SoupURI *uri;

      uri = soup_message_get_uri (msg);
      basename = http_uri_get_basename (uri->path);
    }

  g_debug ("basename:%s\n", basename);

  /* read http/1.1 rfc, until then we copy the local files
   * behaviour */
  if (basename != NULL &&
      (g_file_attribute_matcher_matches (matcher,
                                         G_FILE_ATTRIBUTE_STANDARD_DISPLAY_NAME) ||
       g_file_attribute_matcher_matches (matcher,
                                         G_FILE_ATTRIBUTE_STANDARD_EDIT_NAME)))
    ed_name = gvfs_file_info_populate_names_as_local (info, basename);

  g_free (basename);
  g_free (ed_name);

  /* Size: a Content-Range total wins over Content-Length; only trust
   * Content-Length when the encoding is actually CONTENT_LENGTH. */
  if (soup_message_headers_get_encoding (msg->response_headers) == SOUP_ENCODING_CONTENT_LENGTH)
    {
      goffset start, end, length;
      gboolean ret;

      ret = soup_message_headers_get_content_range (msg->response_headers,
                                                    &start, &end, &length);
      if (ret && length != -1)
        {
          g_file_info_set_size (info, length);
        }
      else if (!ret)
        {
          length = soup_message_headers_get_content_length (msg->response_headers);
          g_file_info_set_size (info, length);
        }
    }

  g_file_info_set_file_type (info, G_FILE_TYPE_REGULAR);

  text = soup_message_headers_get_content_type (msg->response_headers, NULL);
  if (text)
    {
      GIcon *icon;

      g_file_info_set_content_type (info, text);
      g_file_info_set_attribute_string (info,
                                        G_FILE_ATTRIBUTE_STANDARD_FAST_CONTENT_TYPE,
                                        text);

      icon = g_content_type_get_icon (text);
      g_file_info_set_icon (info, icon);
      g_object_unref (icon);

      icon = g_content_type_get_symbolic_icon (text);
      g_file_info_set_symbolic_icon (info, icon);
      g_object_unref (icon);
    }

  text = soup_message_headers_get_one (msg->response_headers,
                                       "Last-Modified");
  if (text)
    {
      SoupDate *sd;
      GTimeVal tv;

      sd = soup_date_new_from_string (text);
      if (sd)
        {
          soup_date_to_timeval (sd, &tv);
          g_file_info_set_modification_time (info, &tv);
          soup_date_free (sd);
        }
    }

  text = soup_message_headers_get_one (msg->response_headers,
                                       "ETag");
  if (text)
    {
      g_file_info_set_attribute_string (info,
                                        G_FILE_ATTRIBUTE_ETAG_VALUE,
                                        text);
    }
}
/* Downloads a feed specified in the request structure, returns
   the downloaded data or NULL in the request structure.
   If the webserver reports a permanent redirection, the feed
   url will be modified and the old URL 'll be freed. The
   request structure will also contain the HTTP status and the
   last modified string.
 */
void
network_process_request (const updateJobPtr job)
{
	SoupMessage	*msg;
	SoupDate	*date;
	gboolean	do_not_track = FALSE;

	g_assert (NULL != job->request);
	debug1 (DEBUG_NET, "downloading %s", job->request->source);
	/* Only dump POST payloads when both verbose and net debugging are on. */
	if (job->request->postdata && (debug_level & DEBUG_VERBOSE) && (debug_level & DEBUG_NET))
		debug1 (DEBUG_NET, " postdata=>>>%s<<<", job->request->postdata);

	/* Prepare the SoupMessage: POST when there is a payload, GET otherwise. */
	msg = soup_message_new (job->request->postdata ? SOUP_METHOD_POST : SOUP_METHOD_GET,
	                        job->request->source);

	if (!msg) {
		/* soup_message_new() returns NULL for unparseable URIs. */
		g_warning ("The request for %s could not be parsed!", job->request->source);
		return;
	}

	/* Set the postdata for the request */
	if (job->request->postdata) {
		soup_message_set_request (msg,
		                          "application/x-www-form-urlencoded",
		                          SOUP_MEMORY_STATIC, /* libsoup won't free the postdata */
		                          job->request->postdata,
		                          strlen (job->request->postdata));
	}

	/* Set the If-Modified-Since: header (conditional GET support). */
	if (job->request->updateState && update_state_get_lastmodified (job->request->updateState)) {
		gchar *datestr;

		date = soup_date_new_from_time_t (update_state_get_lastmodified (job->request->updateState));
		datestr = soup_date_to_string (date, SOUP_DATE_HTTP);
		soup_message_headers_append (msg->request_headers,
		                             "If-Modified-Since",
		                             datestr);
		g_free (datestr);
		soup_date_free (date);
	}

	/* Set the If-None-Match header (ETag-based conditional GET). */
	if (job->request->updateState && update_state_get_etag (job->request->updateState)) {
		soup_message_headers_append (msg->request_headers,
		                             "If-None-Match",
		                             update_state_get_etag (job->request->updateState));
	}

	/* Set the I-AM header (RFC 3229 delta encoding, only when we can
	   send a conditional request). */
	if (job->request->updateState &&
	    (update_state_get_lastmodified (job->request->updateState) ||
	     update_state_get_etag (job->request->updateState))) {
		soup_message_headers_append (msg->request_headers, "A-IM", "feed");
	}

	/* Support HTTP content negotiation */
	soup_message_headers_append (msg->request_headers, "Accept", "application/atom+xml,application/xml;q=0.9,text/xml;q=0.8,*/*;q=0.7");

	/* Set the authentication: embed username/password in the URI unless
	   an explicit Authorization value was already provided. */
	if (!job->request->authValue &&
	    job->request->options &&
	    job->request->options->username &&
	    job->request->options->password) {
		SoupURI *uri = soup_message_get_uri (msg);

		soup_uri_set_user (uri, job->request->options->username);
		soup_uri_set_password (uri, job->request->options->password);
	}

	if (job->request->authValue) {
		soup_message_headers_append (msg->request_headers, "Authorization",
		                             job->request->authValue);
	}

	/* Add requested cookies; disable the session cookie jar so it does
	   not override or store them. */
	if (job->request->updateState && job->request->updateState->cookies) {
		soup_message_headers_append (msg->request_headers, "Cookie",
		                             job->request->updateState->cookies);
		soup_message_disable_feature (msg, SOUP_TYPE_COOKIE_JAR);
	}

	/* TODO: Right now we send the msg, and if it requires authentication and
	 * we didn't provide one, the petition fails and when the job is processed
	 * it sees it needs authentication and displays a dialog, and if credentials
	 * are entered, it queues a new job with auth credentials. Instead of that,
	 * we should probably handle authentication directly here, connecting the
	 * msg to a callback in case of 401 (see soup_message_add_status_code_handler())
	 * displaying the dialog ourselves, and requeing the msg if we get credentials */

	/* Add Do Not Track header according to settings */
	conf_get_bool_value (DO_NOT_TRACK, &do_not_track);
	if (do_not_track)
		soup_message_headers_append (msg->request_headers, "DNT", "1");

	/* If the feed has "dont use a proxy" selected, use 'session2' which is non-proxy */
	if (job->request->options && job->request->options->dontUseProxy)
		soup_session_queue_message (session2, msg, network_process_callback, job);
	else
		soup_session_queue_message (session, msg, network_process_callback, job);
}