Diffstat (limited to 'src/backend/tsearch')
-rw-r--r--   src/backend/tsearch/dict.c            16
-rw-r--r--   src/backend/tsearch/dict_ispell.c      4
-rw-r--r--   src/backend/tsearch/dict_simple.c      4
-rw-r--r--   src/backend/tsearch/dict_thesaurus.c  20
-rw-r--r--   src/backend/tsearch/spell.c           14
-rw-r--r--   src/backend/tsearch/to_tsany.c        10
-rw-r--r--   src/backend/tsearch/ts_locale.c        2
-rw-r--r--   src/backend/tsearch/ts_parse.c        18
-rw-r--r--   src/backend/tsearch/ts_typanalyze.c    4
-rw-r--r--   src/backend/tsearch/wparser.c         22
-rw-r--r--   src/backend/tsearch/wparser_def.c      2
11 files changed, 58 insertions, 58 deletions
diff --git a/src/backend/tsearch/dict.c b/src/backend/tsearch/dict.c
index ba8a3d79a83..63655fb592d 100644
--- a/src/backend/tsearch/dict.c
+++ b/src/backend/tsearch/dict.c
@@ -37,19 +37,19 @@ ts_lexize(PG_FUNCTION_ARGS)
dict = lookup_ts_dictionary_cache(dictId);
res = (TSLexeme *) DatumGetPointer(FunctionCall4(&dict->lexize,
- PointerGetDatum(dict->dictData),
- PointerGetDatum(VARDATA_ANY(in)),
- Int32GetDatum(VARSIZE_ANY_EXHDR(in)),
- PointerGetDatum(&dstate)));
+ PointerGetDatum(dict->dictData),
+ PointerGetDatum(VARDATA_ANY(in)),
+ Int32GetDatum(VARSIZE_ANY_EXHDR(in)),
+ PointerGetDatum(&dstate)));
if (dstate.getnext)
{
dstate.isend = true;
ptr = (TSLexeme *) DatumGetPointer(FunctionCall4(&dict->lexize,
- PointerGetDatum(dict->dictData),
- PointerGetDatum(VARDATA_ANY(in)),
- Int32GetDatum(VARSIZE_ANY_EXHDR(in)),
- PointerGetDatum(&dstate)));
+ PointerGetDatum(dict->dictData),
+ PointerGetDatum(VARDATA_ANY(in)),
+ Int32GetDatum(VARSIZE_ANY_EXHDR(in)),
+ PointerGetDatum(&dstate)));
if (ptr != NULL)
res = ptr;
}
diff --git a/src/backend/tsearch/dict_ispell.c b/src/backend/tsearch/dict_ispell.c
index b4576bf1f87..8f61bd2830a 100644
--- a/src/backend/tsearch/dict_ispell.c
+++ b/src/backend/tsearch/dict_ispell.c
@@ -51,8 +51,8 @@ dispell_init(PG_FUNCTION_ARGS)
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("multiple DictFile parameters")));
NIImportDictionary(&(d->obj),
- get_tsearch_config_filename(defGetString(defel),
- "dict"));
+ get_tsearch_config_filename(defGetString(defel),
+ "dict"));
dictloaded = true;
}
else if (pg_strcasecmp(defel->defname, "AffFile") == 0)
diff --git a/src/backend/tsearch/dict_simple.c b/src/backend/tsearch/dict_simple.c
index c3613628800..a13cdc0743f 100644
--- a/src/backend/tsearch/dict_simple.c
+++ b/src/backend/tsearch/dict_simple.c
@@ -63,8 +63,8 @@ dsimple_init(PG_FUNCTION_ARGS)
{
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
- errmsg("unrecognized simple dictionary parameter: \"%s\"",
- defel->defname)));
+ errmsg("unrecognized simple dictionary parameter: \"%s\"",
+ defel->defname)));
}
}
diff --git a/src/backend/tsearch/dict_thesaurus.c b/src/backend/tsearch/dict_thesaurus.c
index ccf057b5d64..1b6085add3a 100644
--- a/src/backend/tsearch/dict_thesaurus.c
+++ b/src/backend/tsearch/dict_thesaurus.c
@@ -410,10 +410,10 @@ compileTheLexeme(DictThesaurus *d)
else
{
ptr = (TSLexeme *) DatumGetPointer(FunctionCall4(&(d->subdict->lexize),
- PointerGetDatum(d->subdict->dictData),
- PointerGetDatum(d->wrds[i].lexeme),
- Int32GetDatum(strlen(d->wrds[i].lexeme)),
- PointerGetDatum(NULL)));
+ PointerGetDatum(d->subdict->dictData),
+ PointerGetDatum(d->wrds[i].lexeme),
+ Int32GetDatum(strlen(d->wrds[i].lexeme)),
+ PointerGetDatum(NULL)));
if (!ptr)
ereport(ERROR,
@@ -535,11 +535,11 @@ compileTheSubstitute(DictThesaurus *d)
{
lexized = (TSLexeme *) DatumGetPointer(
FunctionCall4(
- &(d->subdict->lexize),
- PointerGetDatum(d->subdict->dictData),
- PointerGetDatum(inptr->lexeme),
- Int32GetDatum(strlen(inptr->lexeme)),
- PointerGetDatum(NULL)
+ &(d->subdict->lexize),
+ PointerGetDatum(d->subdict->dictData),
+ PointerGetDatum(inptr->lexeme),
+ Int32GetDatum(strlen(inptr->lexeme)),
+ PointerGetDatum(NULL)
)
);
}
@@ -816,7 +816,7 @@ thesaurus_lexize(PG_FUNCTION_ARGS)
d->subdict = lookup_ts_dictionary_cache(d->subdictOid);
res = (TSLexeme *) DatumGetPointer(FunctionCall4(&(d->subdict->lexize),
- PointerGetDatum(d->subdict->dictData),
+ PointerGetDatum(d->subdict->dictData),
PG_GETARG_DATUM(1),
PG_GETARG_DATUM(2),
PointerGetDatum(NULL)));
diff --git a/src/backend/tsearch/spell.c b/src/backend/tsearch/spell.c
index 3bcc93fa1b7..10202504905 100644
--- a/src/backend/tsearch/spell.c
+++ b/src/backend/tsearch/spell.c
@@ -411,8 +411,8 @@ getNextFlagFromString(IspellDict *Conf, char **sflagset, char *sflag)
{
ereport(ERROR,
(errcode(ERRCODE_CONFIG_FILE_ERROR),
- errmsg("invalid character in affix flag \"%s\"",
- *sflagset)));
+ errmsg("invalid character in affix flag \"%s\"",
+ *sflagset)));
}
*sflagset += pg_mblen(*sflagset);
@@ -1088,7 +1088,7 @@ addCompoundAffixFlagValue(IspellDict *Conf, char *s, uint32 val)
Conf->mCompoundAffixFlag *= 2;
Conf->CompoundAffixFlags = (CompoundAffixFlag *)
repalloc((void *) Conf->CompoundAffixFlags,
- Conf->mCompoundAffixFlag * sizeof(CompoundAffixFlag));
+ Conf->mCompoundAffixFlag * sizeof(CompoundAffixFlag));
}
else
{
@@ -1306,7 +1306,7 @@ NIImportOOAffixes(IspellDict *Conf, const char *filename)
if (naffix == 0)
ereport(ERROR,
(errcode(ERRCODE_CONFIG_FILE_ERROR),
- errmsg("invalid number of flag vector aliases")));
+ errmsg("invalid number of flag vector aliases")));
/* Also reserve place for empty flag set */
naffix++;
@@ -1539,7 +1539,7 @@ isnewformat:
if (oldformat)
ereport(ERROR,
(errcode(ERRCODE_CONFIG_FILE_ERROR),
- errmsg("affix file contains both old-style and new-style commands")));
+ errmsg("affix file contains both old-style and new-style commands")));
tsearch_readline_end(&trst);
NIImportOOAffixes(Conf, filename);
@@ -1566,7 +1566,7 @@ MergeAffix(IspellDict *Conf, int a1, int a2)
{
Conf->lenAffixData *= 2;
Conf->AffixData = (char **) repalloc(Conf->AffixData,
- sizeof(char *) * Conf->lenAffixData);
+ sizeof(char *) * Conf->lenAffixData);
}
ptr = Conf->AffixData + Conf->nAffixData;
@@ -1664,7 +1664,7 @@ mkSPNode(IspellDict *Conf, int low, int high, int level)
*/
clearCompoundOnly = (FF_COMPOUNDONLY & data->compoundflag
- & makeCompoundFlags(Conf, Conf->Spell[i]->p.d.affix))
+ & makeCompoundFlags(Conf, Conf->Spell[i]->p.d.affix))
? false : true;
data->affix = MergeAffix(Conf, data->affix, Conf->Spell[i]->p.d.affix);
}
diff --git a/src/backend/tsearch/to_tsany.c b/src/backend/tsearch/to_tsany.c
index 18368d118e6..6400440756d 100644
--- a/src/backend/tsearch/to_tsany.c
+++ b/src/backend/tsearch/to_tsany.c
@@ -49,8 +49,8 @@ compareWORD(const void *a, const void *b)
int res;
res = tsCompareString(
- ((const ParsedWord *) a)->word, ((const ParsedWord *) a)->len,
- ((const ParsedWord *) b)->word, ((const ParsedWord *) b)->len,
+ ((const ParsedWord *) a)->word, ((const ParsedWord *) a)->len,
+ ((const ParsedWord *) b)->word, ((const ParsedWord *) b)->len,
false);
if (res == 0)
@@ -390,8 +390,8 @@ add_to_tsvector(void *_state, char *elem_value, int elem_len)
item_vector = make_tsvector(prs);
state->result = (TSVector) DirectFunctionCall2(tsvector_concat,
- TSVectorGetDatum(state->result),
- PointerGetDatum(item_vector));
+ TSVectorGetDatum(state->result),
+ PointerGetDatum(item_vector));
}
else
state->result = make_tsvector(prs);
@@ -472,7 +472,7 @@ pushval_morph(Datum opaque, TSQueryParserState state, char *strval, int lenval,
prs.words[count].word,
prs.words[count].len,
weight,
- ((prs.words[count].flags & TSL_PREFIX) || prefix));
+ ((prs.words[count].flags & TSL_PREFIX) || prefix));
pfree(prs.words[count].word);
if (cnt)
pushOperator(state, OP_AND, 0);
diff --git a/src/backend/tsearch/ts_locale.c b/src/backend/tsearch/ts_locale.c
index 2cc084770d2..1aa3e237338 100644
--- a/src/backend/tsearch/ts_locale.c
+++ b/src/backend/tsearch/ts_locale.c
@@ -296,7 +296,7 @@ lowerstr_with_len(const char *str, int len)
if (wlen < 0)
ereport(ERROR,
(errcode(ERRCODE_CHARACTER_NOT_IN_REPERTOIRE),
- errmsg("conversion from wchar_t to server encoding failed: %m")));
+ errmsg("conversion from wchar_t to server encoding failed: %m")));
Assert(wlen < len);
}
else
diff --git a/src/backend/tsearch/ts_parse.c b/src/backend/tsearch/ts_parse.c
index b612fb0e2cb..ad5dddff4b0 100644
--- a/src/backend/tsearch/ts_parse.c
+++ b/src/backend/tsearch/ts_parse.c
@@ -205,11 +205,11 @@ LexizeExec(LexizeData *ld, ParsedLex **correspondLexem)
ld->dictState.isend = ld->dictState.getnext = false;
ld->dictState.private_state = NULL;
res = (TSLexeme *) DatumGetPointer(FunctionCall4(
- &(dict->lexize),
- PointerGetDatum(dict->dictData),
- PointerGetDatum(curValLemm),
- Int32GetDatum(curValLenLemm),
- PointerGetDatum(&ld->dictState)
+ &(dict->lexize),
+ PointerGetDatum(dict->dictData),
+ PointerGetDatum(curValLemm),
+ Int32GetDatum(curValLenLemm),
+ PointerGetDatum(&ld->dictState)
));
if (ld->dictState.getnext)
@@ -295,10 +295,10 @@ LexizeExec(LexizeData *ld, ParsedLex **correspondLexem)
res = (TSLexeme *) DatumGetPointer(FunctionCall4(
&(dict->lexize),
- PointerGetDatum(dict->dictData),
- PointerGetDatum(curVal->lemm),
- Int32GetDatum(curVal->lenlemm),
- PointerGetDatum(&ld->dictState)
+ PointerGetDatum(dict->dictData),
+ PointerGetDatum(curVal->lemm),
+ Int32GetDatum(curVal->lenlemm),
+ PointerGetDatum(&ld->dictState)
));
if (ld->dictState.getnext)
diff --git a/src/backend/tsearch/ts_typanalyze.c b/src/backend/tsearch/ts_typanalyze.c
index 975623fa960..ab224b76b86 100644
--- a/src/backend/tsearch/ts_typanalyze.c
+++ b/src/backend/tsearch/ts_typanalyze.c
@@ -188,7 +188,7 @@ compute_tsvector_stats(VacAttrStats *stats,
lexemes_tab = hash_create("Analyzed lexemes table",
num_mcelem,
&hash_ctl,
- HASH_ELEM | HASH_FUNCTION | HASH_COMPARE | HASH_CONTEXT);
+ HASH_ELEM | HASH_FUNCTION | HASH_COMPARE | HASH_CONTEXT);
/* Initialize counters. */
b_current = 1;
@@ -396,7 +396,7 @@ compute_tsvector_stats(VacAttrStats *stats,
mcelem_values[i] =
PointerGetDatum(cstring_to_text_with_len(item->key.lexeme,
- item->key.length));
+ item->key.length));
mcelem_freqs[i] = (double) item->frequency / (double) nonnull_cnt;
}
mcelem_freqs[i++] = (double) minfreq / (double) nonnull_cnt;
diff --git a/src/backend/tsearch/wparser.c b/src/backend/tsearch/wparser.c
index 8f4727448f3..c9ce80a91af 100644
--- a/src/backend/tsearch/wparser.c
+++ b/src/backend/tsearch/wparser.c
@@ -186,8 +186,8 @@ prs_setup_firstcall(FuncCallContext *funcctx, Oid prsid, text *txt)
st->list = (LexemeEntry *) palloc(sizeof(LexemeEntry) * st->len);
prsdata = (void *) DatumGetPointer(FunctionCall2(&prs->prsstart,
- PointerGetDatum(VARDATA_ANY(txt)),
- Int32GetDatum(VARSIZE_ANY_EXHDR(txt))));
+ PointerGetDatum(VARDATA_ANY(txt)),
+ Int32GetDatum(VARSIZE_ANY_EXHDR(txt))));
while ((type = DatumGetInt32(FunctionCall3(&prs->prstoken,
PointerGetDatum(prsdata),
@@ -319,7 +319,7 @@ ts_headline_byid_opt(PG_FUNCTION_ARGS)
if (!OidIsValid(prsobj->headlineOid))
ereport(ERROR,
(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("text search parser does not support headline creation")));
+ errmsg("text search parser does not support headline creation")));
memset(&prs, 0, sizeof(HeadlineParsedText));
prs.lenwords = 32;
@@ -364,7 +364,7 @@ Datum
ts_headline(PG_FUNCTION_ARGS)
{
PG_RETURN_DATUM(DirectFunctionCall3(ts_headline_byid_opt,
- ObjectIdGetDatum(getTSCurrentConfig(true)),
+ ObjectIdGetDatum(getTSCurrentConfig(true)),
PG_GETARG_DATUM(0),
PG_GETARG_DATUM(1)));
}
@@ -373,7 +373,7 @@ Datum
ts_headline_opt(PG_FUNCTION_ARGS)
{
PG_RETURN_DATUM(DirectFunctionCall4(ts_headline_byid_opt,
- ObjectIdGetDatum(getTSCurrentConfig(true)),
+ ObjectIdGetDatum(getTSCurrentConfig(true)),
PG_GETARG_DATUM(0),
PG_GETARG_DATUM(1),
PG_GETARG_DATUM(2)));
@@ -407,7 +407,7 @@ ts_headline_jsonb_byid_opt(PG_FUNCTION_ARGS)
if (!OidIsValid(state->prsobj->headlineOid))
ereport(ERROR,
(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("text search parser does not support headline creation")));
+ errmsg("text search parser does not support headline creation")));
out = transform_jsonb_string_values(jb, state, action);
@@ -431,7 +431,7 @@ Datum
ts_headline_jsonb(PG_FUNCTION_ARGS)
{
PG_RETURN_DATUM(DirectFunctionCall3(ts_headline_jsonb_byid_opt,
- ObjectIdGetDatum(getTSCurrentConfig(true)),
+ ObjectIdGetDatum(getTSCurrentConfig(true)),
PG_GETARG_DATUM(0),
PG_GETARG_DATUM(1)));
}
@@ -449,7 +449,7 @@ Datum
ts_headline_jsonb_opt(PG_FUNCTION_ARGS)
{
PG_RETURN_DATUM(DirectFunctionCall4(ts_headline_jsonb_byid_opt,
- ObjectIdGetDatum(getTSCurrentConfig(true)),
+ ObjectIdGetDatum(getTSCurrentConfig(true)),
PG_GETARG_DATUM(0),
PG_GETARG_DATUM(1),
PG_GETARG_DATUM(2)));
@@ -484,7 +484,7 @@ ts_headline_json_byid_opt(PG_FUNCTION_ARGS)
if (!OidIsValid(state->prsobj->headlineOid))
ereport(ERROR,
(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("text search parser does not support headline creation")));
+ errmsg("text search parser does not support headline creation")));
out = transform_json_string_values(json, state, action);
@@ -507,7 +507,7 @@ Datum
ts_headline_json(PG_FUNCTION_ARGS)
{
PG_RETURN_DATUM(DirectFunctionCall3(ts_headline_json_byid_opt,
- ObjectIdGetDatum(getTSCurrentConfig(true)),
+ ObjectIdGetDatum(getTSCurrentConfig(true)),
PG_GETARG_DATUM(0),
PG_GETARG_DATUM(1)));
}
@@ -525,7 +525,7 @@ Datum
ts_headline_json_opt(PG_FUNCTION_ARGS)
{
PG_RETURN_DATUM(DirectFunctionCall4(ts_headline_json_byid_opt,
- ObjectIdGetDatum(getTSCurrentConfig(true)),
+ ObjectIdGetDatum(getTSCurrentConfig(true)),
PG_GETARG_DATUM(0),
PG_GETARG_DATUM(1),
PG_GETARG_DATUM(2)));
diff --git a/src/backend/tsearch/wparser_def.c b/src/backend/tsearch/wparser_def.c
index 0ce2e00eb29..e841a1ccf0e 100644
--- a/src/backend/tsearch/wparser_def.c
+++ b/src/backend/tsearch/wparser_def.c
@@ -2295,7 +2295,7 @@ mark_hl_fragments(HeadlineParsedText *prs, TSQuery query, int highlight,
{
if (!covers[i].in && !covers[i].excluded &&
(maxitems < covers[i].poslen || (maxitems == covers[i].poslen
- && minwords > covers[i].curlen)))
+ && minwords > covers[i].curlen)))
{
maxitems = covers[i].poslen;
minwords = covers[i].curlen;