X-Git-Url: https://git.librecmc.org/?a=blobdiff_plain;f=src%2Ffs%2Ffs_uri.c;h=0c2d64caca85186b4c5247e039e62ac935c994e1;hb=04630c5e40fc4de16393894d0b5ff2ca9055f4e2;hp=d3fcdd8ca289dee2da44410dc89df93d59a67e35;hpb=cdee03748e83189713b32bb87bc77cde659c20d9;p=oweals%2Fgnunet.git

diff --git a/src/fs/fs_uri.c b/src/fs/fs_uri.c
index d3fcdd8ca..0c2d64cac 100644
--- a/src/fs/fs_uri.c
+++ b/src/fs/fs_uri.c
@@ -82,6 +82,12 @@
 #include "gnunet_fs_service.h"
 #include "gnunet_signatures.h"
 #include "fs_api.h"
+#include <unistr.h>
+#include <uniconv.h>
+#include <unicase.h>
+#include <uninorm.h>
+#include <localcharset.h>
+


 /**
@@ -208,7 +214,7 @@ percent_decode_keyword (const char *in, char **emsg)
   {
     if (out[rpos] == '%')
     {
-      if (1 != sscanf (&out[rpos + 1], "%2X", &hx))
+      if (1 != SSCANF (&out[rpos + 1], "%2X", &hx))
       {
         GNUNET_free (out);
         *emsg = GNUNET_strdup (_("`%' must be followed by HEX number"));
@@ -1372,14 +1378,16 @@ GNUNET_FS_uri_sks_to_string_fancy (struct GNUNET_CONFIGURATION_Handle *cfg,
 {
   char *ret;
   char *name;
+  char *unique_name;

   if (uri->type != sks)
     return NULL;
-  name = GNUNET_PSEUDONYM_id_to_name (cfg, &uri->data.sks.namespace);
-  if (name == NULL)
-    return GNUNET_FS_uri_to_string (uri);
-  GNUNET_asprintf (&ret, "%s: %s", name, uri->data.sks.identifier);
+  (void) GNUNET_PSEUDONYM_get_info (cfg, &uri->data.sks.namespace,
+                                    NULL, NULL, &name, NULL);
+  unique_name = GNUNET_PSEUDONYM_name_uniquify (cfg, &uri->data.sks.namespace, name, NULL);
   GNUNET_free (name);
+  GNUNET_asprintf (&ret, "%s: %s", unique_name, uri->data.sks.identifier);
+  GNUNET_free (unique_name);
   return ret;
 }
@@ -1398,7 +1406,7 @@ GNUNET_FS_uri_test_ksk (const struct GNUNET_FS_Uri *uri)

   if (uri->type == ksk)
   {
-    for (i = uri->data.ksk.keywordCount - 1; i >= 0; i--)
+    for (i=0;i < uri->data.ksk.keywordCount; i++)
       GNUNET_assert (uri->data.ksk.keywords[i] != NULL);
   }
 #endif
@@ -1494,6 +1502,69 @@ find_duplicate (const char *s, const char **array, int array_length)
 }


+/**
+ * Convert a metadata value to lower-case, NFD-normalized UTF-8 so that keywords compare consistently.
+ */
+static char *
+normalize_metadata (enum EXTRACTOR_MetaFormat format, const char *data,
+    size_t data_len)
+{
+  uint8_t *free_str = NULL;
+  uint8_t *str_to_normalize = (uint8_t *) data;
+  uint8_t *normalized;
+  size_t r_len;
+  if (str_to_normalize == NULL)
+    return NULL;
+  /* Don't trust libextractor */
+  if (format == EXTRACTOR_METAFORMAT_UTF8)
+  {
+    free_str = (uint8_t *) u8_check ((const uint8_t *) data, data_len);
+    if (free_str == NULL)
+      free_str = NULL;
+    else
+      format = EXTRACTOR_METAFORMAT_C_STRING;
+  }
+  if (format == EXTRACTOR_METAFORMAT_C_STRING)
+  {
+    free_str = u8_strconv_from_encoding (data, locale_charset (), iconveh_escape_sequence);
+    if (free_str == NULL)
+      return NULL;
+  }
+
+  normalized = u8_tolower (str_to_normalize, strlen ((char *) str_to_normalize), NULL, UNINORM_NFD, NULL, &r_len);
+  /* free_str is allocated by libunistring internally, use free() */
+  if (free_str != NULL)
+    free (free_str);
+  if (normalized != NULL)
+  {
+    /* u8_tolower allocates a non-NULL-terminated string! */
+    free_str = GNUNET_malloc (r_len + 1);
+    memcpy (free_str, normalized, r_len);
+    free_str[r_len] = '\0';
+    free (normalized);
+    normalized = free_str;
+  }
+  return (char *) normalized;
+}
+
+/**
+ * Counts the number of UTF-8 characters (not bytes) in the string,
+ * returns that count.
+ */
+static size_t
+u8_strcount (const uint8_t *s)
+{
+  size_t count;
+  ucs4_t c;
+  GNUNET_assert (s != NULL);
+  if (s[0] == 0)
+    return 0;
+  for (count = 0; s != NULL; count++)
+    s = u8_next (&c, s);
+  return count - 1;
+}
+
+
 /**
  * Break the filename up by matching [], () and {} pairs to make
  * keywords. In case of nesting parentheses only the inner pair counts.
@@ -1549,20 +1620,41 @@ get_keywords_from_parens (const char *s, char **array, int index)
     }
     if (match && (close_paren - open_paren > 1))
     {
+      tmp = close_paren[0];
+      close_paren[0] = '\0';
+      /* Keywords must be at least 3 characters long */
+      if (u8_strcount ((const uint8_t *) &open_paren[1]) <= 2)
+      {
+        close_paren[0] = tmp;
+        continue;
+      }
       if (NULL != array)
       {
-        tmp = close_paren[0];
-        close_paren[0] = '\0';
-        if (GNUNET_NO == find_duplicate ((const char *) &open_paren[1], (const char **) array, index + count))
+        char *normalized;
+        if (GNUNET_NO == find_duplicate ((const char *) &open_paren[1],
+                                         (const char **) array, index + count))
         {
           insert_non_mandatory_keyword ((const char *) &open_paren[1], array,
                                         index + count);
           count++;
         }
-        close_paren[0] = tmp;
+        normalized = normalize_metadata (EXTRACTOR_METAFORMAT_UTF8,
+                                         &open_paren[1], close_paren - &open_paren[1]);
+        if (normalized != NULL)
+        {
+          if (GNUNET_NO == find_duplicate ((const char *) normalized,
+                                           (const char **) array, index + count))
+          {
+            insert_non_mandatory_keyword ((const char *) normalized, array,
+                                          index + count);
+            count++;
+          }
+          GNUNET_free (normalized);
+        }
       }
       else
         count++;
+      close_paren[0] = tmp;
     }
   }
   GNUNET_free (ss);
@@ -1571,7 +1663,12 @@ get_keywords_from_parens (const char *s, char **array, int index)


 /**
- * Break the filename up by "_", " " and "." (any other separators?) to make
+ * Where to break up keywords
+ */
+#define TOKENS "_. /-!?#&+@\"\'\\;:,"
+
+/**
+ * Break the filename up by TOKENS to make
  * keywords.
  *
  * @param s string to break down.
@@ -1592,16 +1689,33 @@ get_keywords_from_tokens (const char *s, char **array, int index)
   int seps = 0;

   ss = GNUNET_strdup (s);
-  for (p = strtok (ss, "_. "); p != NULL; p = strtok (NULL, "_, "))
+  for (p = strtok (ss, TOKENS); p != NULL; p = strtok (NULL, TOKENS))
   {
+    /* Keywords must be at least 3 characters long */
+    if (u8_strcount ((const uint8_t *) p) <= 2)
+      continue;
     if (NULL != array)
     {
+      char *normalized;
       if (GNUNET_NO == find_duplicate (p, (const char **) array, index + seps))
       {
         insert_non_mandatory_keyword (p, array, index + seps);
         seps++;
       }
+      normalized = normalize_metadata (EXTRACTOR_METAFORMAT_UTF8,
+                                       p, strlen (p));
+      if (normalized != NULL)
+      {
+        if (GNUNET_NO == find_duplicate ((const char *) normalized,
+                                         (const char **) array, index + seps))
+        {
+          insert_non_mandatory_keyword ((const char *) normalized, array,
+                                        index + seps);
+          seps++;
+        }
+        GNUNET_free (normalized);
+      }
     }
     else
       seps++;
@@ -1609,7 +1723,7 @@ get_keywords_from_tokens (const char *s, char **array, int index)
   GNUNET_free (ss);
   return seps;
 }
-
+#undef TOKENS

 /**
  * Function called on each value in the meta data.
@@ -1634,15 +1748,37 @@ gather_uri_data (void *cls, const char *plugin_name,
                  const char *data_mime_type, const char *data, size_t data_len)
 {
   struct GNUNET_FS_Uri *uri = cls;
+  char *normalized_data;

   if ((format != EXTRACTOR_METAFORMAT_UTF8) &&
       (format != EXTRACTOR_METAFORMAT_C_STRING))
     return 0;
-  if (find_duplicate (data, (const char **) uri->data.ksk.keywords, uri->data.ksk.keywordCount))
-    return GNUNET_OK;
-  insert_non_mandatory_keyword (data,
-                                uri->data.ksk.keywords, uri->data.ksk.keywordCount);
-  uri->data.ksk.keywordCount++;
+  /* Keywords must be at least 3 characters long
+   * If given non-utf8 string it will, most likely, find it to be invalid,
+   * and will return the length of its valid part, skipping the keyword.
+   * If it does - fix the extractor, not this check!
+   */
+  if (u8_strcount ((const uint8_t *) data) <= 2)
+  {
+    return 0;
+  }
+  normalized_data = normalize_metadata (format, data, data_len);
+  if (!find_duplicate (data, (const char **) uri->data.ksk.keywords, uri->data.ksk.keywordCount))
+  {
+    insert_non_mandatory_keyword (data,
+                                  uri->data.ksk.keywords, uri->data.ksk.keywordCount);
+    uri->data.ksk.keywordCount++;
+  }
+  if (normalized_data != NULL)
+  {
+    if (!find_duplicate (normalized_data, (const char **) uri->data.ksk.keywords, uri->data.ksk.keywordCount))
+    {
+      insert_non_mandatory_keyword (normalized_data,
+                                    uri->data.ksk.keywords, uri->data.ksk.keywordCount);
+      uri->data.ksk.keywordCount++;
+    }
+    GNUNET_free (normalized_data);
+  }
   return 0;
 }
@@ -1675,7 +1811,7 @@ GNUNET_FS_uri_ksk_create_from_meta_data (const struct GNUNET_CONTAINER_MetaData
   if (ent > 0)
   {
     full_name = GNUNET_CONTAINER_meta_data_get_first_by_types (md,
-                                                               EXTRACTOR_METATYPE_FILENAME, -1);
+                                                               EXTRACTOR_METATYPE_GNUNET_ORIGINAL_FILENAME, -1);
     if (NULL != full_name)
     {
       filename = full_name;
@@ -1684,8 +1820,9 @@ GNUNET_FS_uri_ksk_create_from_meta_data (const struct GNUNET_CONTAINER_MetaData
       tok_keywords = get_keywords_from_tokens (filename, NULL, 0);
       paren_keywords = get_keywords_from_parens (filename, NULL, 0);
     }
+    /* x2 because there might be a normalized variant of every keyword */
     ret->data.ksk.keywords = GNUNET_malloc (sizeof (char *) * (ent
-                                                                + tok_keywords + paren_keywords));
+                                                                + tok_keywords + paren_keywords) * 2);
     GNUNET_CONTAINER_meta_data_iterate (md, &gather_uri_data, ret);
   }
   if (tok_keywords > 0)
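
Note on the libunistring usage above: the new normalize_metadata() and u8_strcount() helpers rely on u8_check() to validate UTF-8, u8_tolower() with UNINORM_NFD to lowercase and canonically decompose a keyword, and u8_next() to count characters rather than bytes. The standalone sketch below is not part of the patch; it only exercises those calls in isolation so the patch is easier to follow. The file name normalize_demo.c and the build line are illustrative assumptions; link against libunistring, e.g.: gcc normalize_demo.c -o normalize_demo -lunistring

/*
 * normalize_demo.c -- illustrative sketch (not from the patch) of the
 * libunistring calls used by normalize_metadata() and u8_strcount().
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistr.h>     /* u8_check, u8_next, ucs4_t */
#include <unicase.h>    /* u8_tolower */
#include <uninorm.h>    /* UNINORM_NFD */

/* Count UTF-8 characters (not bytes), like the patch's u8_strcount(). */
static size_t
count_utf8_chars (const uint8_t *s)
{
  size_t count = 0;
  ucs4_t c;

  while (NULL != (s = u8_next (&c, s)))
    count++;
  return count;
}

int
main (void)
{
  const uint8_t *keyword = (const uint8_t *) "GNUnet";
  uint8_t *normalized;
  size_t r_len;

  /* Reject invalid UTF-8, as normalize_metadata() does via u8_check(). */
  if (NULL != u8_check (keyword, strlen ((const char *) keyword)))
  {
    fprintf (stderr, "input is not valid UTF-8\n");
    return 1;
  }
  /* Keywords of 2 characters or fewer are skipped by the patch. */
  if (count_utf8_chars (keyword) <= 2)
    return 0;
  /* Lowercase + NFD-normalize; the result is NOT 0-terminated,
     r_len receives its length in bytes. */
  normalized = u8_tolower (keyword, strlen ((const char *) keyword),
                           NULL, UNINORM_NFD, NULL, &r_len);
  if (NULL == normalized)
    return 1;
  printf ("normalized keyword: %.*s (%zu bytes)\n",
          (int) r_len, (const char *) normalized, r_len);
  free (normalized);
  return 0;
}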