source: cpp/frams/genetics/genooperators.cpp @ 1241

Last change on this file since 1241 was 1241, checked in by Maciej Komosinski, 12 months ago

No longer sort modifiers and cancel out antagonistic modifiers in f1 and f4; simplifying modifier sequences is now much less intrusive to allow for 2^N distinct values of properties instead of only 2*N that resulted from the earlier forced ordering (N is the number of same-letter upper- and lower-case characters in a modifier sequence)

  • Property svn:eol-style set to native
File size: 18.8 KB
// This file is a part of Framsticks SDK.  http://www.framsticks.com/
// Copyright (C) 1999-2023  Maciej Komosinski and Szymon Ulatowski.
// See LICENSE.txt for details.

#include <ctype.h>  //isupper()
#include "genooperators.h"
#include <common/log.h>
#include <common/nonstd_math.h>
#include <frams/util/rndutil.h>
#include <algorithm> // std::min, std::max

//
// custom distributions for mutations of various parameters
//
static double distrib_force[] =   // for '!'
{
        3,             // distribution 0 -__/ +1
        0.001, 0.2,    // "slow" neurons
        0.001, 1,
        1, 1,          // "fast" neurons
};
static double distrib_inertia[] =  // for '='
{
        2,             // distribution 0 |..- +1
        0, 0,          // "fast" neurons
        0.7, 0.98,
};
static double distrib_sigmo[] =  // for '/'
{
        5,             // distribution -999 -..-^-..- +999
        -999, -999,    //"perceptron"
        999, 999,
        -5, -1,        // nonlinear
        1, 5,
        -1, 1,         // ~linear
};
/*
static double distrib_weight[] =
{
5,                 // distribution -999 _-^_^-_ +999
-999, 999,         // each weight value may be useful, especially...
-5, -0.3,          // ...little non-zero values
-3, -0.6,
0.6, 3,
0.3, 5,
};
*/
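// Editorial note on the distribution tables above (a sketch of how they appear to be interpreted
// by CustomRnd() from the included rndutil.h): the first value is presumably the number of ranges,
// followed by that many [min,max] pairs; one range is then picked at random and the returned value
// is drawn from within it, so listing a range more than once (or making it wide) makes its values
// more likely. For example, distrib_sigmo favors the two "perceptron" extremes -999 and +999 as
// well as moderate nonlinear and roughly linear slopes.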

int GenoOperators::roulette(const double *probtab, const int count)
{
        double sum = 0;
        int i;
        for (i = 0; i < count; i++) sum += probtab[i];
        double sel = rndDouble(sum);
        for (sum = 0, i = 0; i < count; i++) { sum += probtab[i]; if (sel < sum) return i; }
        return -1;
}
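// Illustrative example (not part of the original source): roulette() performs proportionate
// selection over probtab, so for a hypothetical table {1.0, 2.0, 7.0} it returns index 2 about
// 70% of the time, index 1 about 20%, and index 0 about 10%; -1 essentially signals a degenerate
// table (e.g., all probabilities zero).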

bool GenoOperators::getMinMaxDef(ParamInterface *p, int i, double &mn, double &mx, double &def)
{
        mn = mx = def = 0;
        int defined = 0;
        if (p->type(i)[0] == 'f')
        {
                double _mn = 0, _mx = 1, _def = 0.5;
                defined = p->getMinMaxDouble(i, _mn, _mx, _def);
                if (defined == 1) _mx = _mn + 1000.0; //only min was defined, so let's set some arbitrary range, just to have some freedom. Assumes _mn is not close to maxdouble...
                if (_mx < _mn && defined == 3) //only default was defined, so let's assume some arbitrary range. Again, no check for min/maxdouble...
                {
                        _mn = _def - 500.0;
                        _mx = _def + 500.0;
                }
                if (defined < 3) _def = (_mn + _mx) / 2.0;
                mn = _mn; mx = _mx; def = _def;
        }
        if (p->type(i)[0] == 'd')
        {
                paInt _mn = 0, _mx = 1, _def = 0;
                defined = p->getMinMaxInt(i, _mn, _mx, _def);
                if (defined == 1) _mx = _mn + 1000; //only min was defined, so let's set some arbitrary range, just to have some freedom. Assumes _mn is not close to maxint...
                if (_mx < _mn && defined == 3) //only default was defined, so let's assume some arbitrary range. Again, no check for min/maxint...
                {
                        _mn = _def - 500;
                        _mx = _def + 500;
                }
                if (defined < 3) _def = (_mn + _mx) / 2;
                mn = _mn; mx = _mx; def = _def;
        }
        return defined == 3;
}

bool GenoOperators::mutateRandomNeuroClassProperty(Neuro* n)
{
        bool mutated = false;
        int prop = selectRandomNeuroClassProperty(n);
        if (prop >= 0)
        {
                if (prop >= GenoOperators::NEUROCLASS_PROP_OFFSET)
                {
                        SyntParam par = n->classProperties();   //commits changes when this object is destroyed
                        mutated = mutateProperty(par, prop - GenoOperators::NEUROCLASS_PROP_OFFSET);
                }
                else
                {
                        Param par = n->extraProperties();
                        mutated = mutateProperty(par, prop);
                }
        }
        return mutated;
}

int GenoOperators::selectRandomNeuroClassProperty(Neuro *n)
{
        int neuext = n->extraProperties().getPropCount(),
                neucls = n->getClass() == NULL ? 0 : n->getClass()->getProperties().getPropCount();
        if (neuext + neucls == 0) return -1; //no properties in this neuron
        int index = rndUint(neuext + neucls);
        if (index >= neuext) index = index - neuext + NEUROCLASS_PROP_OFFSET;
        return index;
}
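// Editorial note: the returned value encodes which property set the index refers to - indices below
// NEUROCLASS_PROP_OFFSET address the neuron's extra properties, while indices at or above the offset
// address the NeuroClass properties (after subtracting the offset), which is how
// mutateRandomNeuroClassProperty() and getMutatedNeuroClassProperty() interpret it.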

double GenoOperators::getMutatedNeuroClassProperty(double current, Neuro *n, int i)
{
        if (i == -1)
        {
                logPrintf("GenoOperators", "getMutatedNeuroClassProperty", LOG_WARN, "Deprecated usage in C++ source: to mutate connection weight, use getMutatedNeuronConnectionWeight().");
                return getMutatedNeuronConnectionWeight(current);
        }
        Param p;
        if (i >= NEUROCLASS_PROP_OFFSET) { i -= NEUROCLASS_PROP_OFFSET; p = n->getClass()->getProperties(); }
        else p = n->extraProperties();
        double newval = current;
        /*bool ok=*/getMutatedProperty(p, i, current, newval);
        return newval;
}

double GenoOperators::getMutatedNeuronConnectionWeight(double current)
{
        return mutateCreepNoLimit('f', current, 2, true);
}

bool GenoOperators::mutatePropertyNaive(ParamInterface &p, int i)
{
        double mn, mx, df;
        if (p.type(i)[0] != 'f' && p.type(i)[0] != 'd') return false; //don't know how to mutate
        getMinMaxDef(&p, i, mn, mx, df);

        ExtValue ev;
        p.get(i, ev);
        ev.setDouble(mutateCreep(p.type(i)[0], ev.getDouble(), mn, mx, true));
        p.set(i, ev);
        return true;
}

bool GenoOperators::mutateProperty(ParamInterface &p, int i)
{
        double newval;
        ExtValue ev;
        p.get(i, ev);
        bool ok = getMutatedProperty(p, i, ev.getDouble(), newval);
        if (ok) { ev.setDouble(newval); p.set(i, ev); }
        return ok;
}

bool GenoOperators::getMutatedProperty(ParamInterface &p, int i, double oldval, double &newval)
{
        newval = 0;
        if (p.type(i)[0] != 'f' && p.type(i)[0] != 'd') return false; //don't know how to mutate
        const char *n = p.id(i), *na = p.name(i);
        if (strcmp(n, "si") == 0 && strcmp(na, "Sigmoid") == 0) newval = round(CustomRnd(distrib_sigmo), 3); else
                if (strcmp(n, "in") == 0 && strcmp(na, "Inertia") == 0) newval = round(CustomRnd(distrib_inertia), 3); else
                        if (strcmp(n, "fo") == 0 && strcmp(na, "Force") == 0) newval = round(CustomRnd(distrib_force), 3); else
                        {
                                double mn, mx, df;
                                getMinMaxDef(&p, i, mn, mx, df);
                                newval = mutateCreep(p.type(i)[0], oldval, mn, mx, true);
                        }
        return true;
}

double GenoOperators::mutateCreepNoLimit(char type, double current, double stddev, bool limit_precision_3digits)
{
        double result = RndGen.Gauss(current, stddev);
        if (type == 'd')
        {
                result = int(result + 0.5);
                if (result == current) result += rndUint(2) * 2 - 1; //force some change
        }
        else
        {
                if (limit_precision_3digits)
                        result = round(result, 3);
        }
        return result;
}
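// Illustrative example (editorial): for an integer ('d') property with current==5, a Gaussian draw
// that rounds back to 5 is nudged to 4 or 6 with equal probability, so the mutation always changes
// the value; for 'f' properties the result is only rounded to 3 decimal digits when
// limit_precision_3digits is set.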

double GenoOperators::mutateCreep(char type, double current, double mn, double mx, double stddev, bool limit_precision_3digits)
{
        double result = mutateCreepNoLimit(type, current, stddev, limit_precision_3digits);
        if (result<mn || result>mx) //exceeds boundary, so bring to the allowed range
        {
                //reflect:
                if (result > mx) result = mx - (result - mx); else
                        if (result < mn) result = mn + (mn - result);
                //wrap (just in case 'result' exceeded the allowed range so much that after reflection above it exceeded the other boundary):
                if (result > mx) result = mn + fmod(result - mx, mx - mn); else
                        if (result < mn) result = mn + fmod(mn - result, mx - mn);
                if (limit_precision_3digits)
                {
                        //reflect and wrap above may have changed the (limited) precision, so try to round again (maybe unnecessarily, because we don't know if reflect+wrap above were triggered)
                        double result_try = round(result, 3);
                        if (mn <= result_try && result_try <= mx) result = result_try; //after rounding still within the allowed range, so keep the rounded value
                }
        }
        return result;
}
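// Worked example (editorial): with mn=0, mx=1 and a creep result of 1.2, reflection gives
// 1 - (1.2 - 1) = 0.8; a result of -2.5 reflects to 0 + 2.5 = 2.5, which still exceeds mx,
// so the wrap step maps it back into [0,1] as 0 + fmod(2.5 - 1, 1) = 0.5.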

double GenoOperators::mutateCreep(char type, double current, double mn, double mx, bool limit_precision_3digits)
{
        double stddev = (mx - mn) / 2 / 5; // magic arbitrary formula for stddev, which becomes /halfinterval, 5 times narrower
        return mutateCreep(type, current, mn, mx, stddev, limit_precision_3digits);
}

void GenoOperators::setIntFromDoubleWithProbabilisticDithering(ParamInterface &p, int index, double value) //TODO
{
        p.setInt(index, (paInt)(value + 0.5)); //TODO value=2.499 will result in 2 and 2.5 will result in 3, but we want these cases to be 2 or 3 with almost equal probability. value=2.1 should be mostly 2, rarely 3. Careful with negative values (test it!)
}
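// A minimal sketch (editorial, not the author's implementation) of the probabilistic dithering
// described in the TODO above: round down, then round up with probability equal to the fractional
// part, so value=2.1 yields 3 only ~10% of the time; using floor() keeps the behavior consistent
// for negative values as well.
//
//      paInt low = (paInt)floor(value);
//      double frac = value - low; // in [0,1)
//      p.setInt(index, low + (rndDouble(1) < frac ? 1 : 0));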

void GenoOperators::linearMix(vector<double> &p1, vector<double> &p2, double proportion)
{
        if (p1.size() != p2.size())
        {
                logPrintf("GenoOperators", "linearMix", LOG_ERROR, "Cannot mix vectors of different length (%d and %d)", p1.size(), p2.size());
                return;
        }
        for (unsigned int i = 0; i < p1.size(); i++)
        {
                double v1 = p1[i];
                double v2 = p2[i];
                p1[i] = v1 * proportion + v2 * (1 - proportion);
                p2[i] = v2 * proportion + v1 * (1 - proportion);
        }
}
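// Illustrative example (editorial): proportion=1 leaves both vectors unchanged, proportion=0 swaps
// them, and proportion=0.5 makes every pair of corresponding elements equal to their average.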

void GenoOperators::linearMix(ParamInterface &p1, int i1, ParamInterface &p2, int i2, double proportion)
{
        char type1 = p1.type(i1)[0];
        char type2 = p2.type(i2)[0];
        if (type1 == 'f' && type2 == 'f')
        {
                double v1 = p1.getDouble(i1);
                double v2 = p2.getDouble(i2);
                p1.setDouble(i1, v1 * proportion + v2 * (1 - proportion));
                p2.setDouble(i2, v2 * proportion + v1 * (1 - proportion));
        }
        else
                if (type1 == 'd' && type2 == 'd')
                {
                        int v1 = p1.getInt(i1);
                        int v2 = p2.getInt(i2);
                        setIntFromDoubleWithProbabilisticDithering(p1, i1, v1 * proportion + v2 * (1 - proportion));
                        setIntFromDoubleWithProbabilisticDithering(p2, i2, v2 * proportion + v1 * (1 - proportion));
                }
                else
                        logPrintf("GenoOperators", "linearMix", LOG_WARN, "Cannot mix values of types '%c' and '%c'", type1, type2);
}

int GenoOperators::getActiveNeuroClassCount(Model::ShapeType for_shape_type)
{
        int count = 0;
        for (int i = 0; i < Neuro::getClassCount(); i++)
        {
                NeuroClass *nc = Neuro::getClass(i);
                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive)
                        count++;
        }
        return count;
}

NeuroClass *GenoOperators::getRandomNeuroClass(Model::ShapeType for_shape_type)
{
        vector<NeuroClass *> active;
        for (int i = 0; i < Neuro::getClassCount(); i++)
        {
                NeuroClass *nc = Neuro::getClass(i);
                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive)
                        active.push_back(nc);
        }
        if (active.size() == 0) return NULL; else return active[rndUint(active.size())];
}

NeuroClass *GenoOperators::getRandomNeuroClassWithOutput(Model::ShapeType for_shape_type)
{
        vector<NeuroClass *> active;
        for (int i = 0; i < Neuro::getClassCount(); i++)
        {
                NeuroClass *nc = Neuro::getClass(i);
                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredOutput() != 0)
                        active.push_back(nc);
        }
        if (active.size() == 0) return NULL; else return active[rndUint(active.size())];
}

NeuroClass *GenoOperators::getRandomNeuroClassWithInput(Model::ShapeType for_shape_type)
{
        vector<NeuroClass *> active;
        for (int i = 0; i < Neuro::getClassCount(); i++)
        {
                NeuroClass *nc = Neuro::getClass(i);
                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredInputs() != 0)
                        active.push_back(nc);
        }
        if (active.size() == 0) return NULL; else return active[rndUint(active.size())];
}

NeuroClass *GenoOperators::getRandomNeuroClassWithOutputAndWantingNoInputs(Model::ShapeType for_shape_type)
{
        vector<NeuroClass *> active;
        for (int i = 0; i < Neuro::getClassCount(); i++)
        {
                NeuroClass *nc = Neuro::getClass(i);
                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredOutput() != 0 && nc->getPreferredInputs() == 0)
                        active.push_back(nc);
        }
        if (active.size() == 0) return NULL; else return active[rndUint(active.size())];
}

NeuroClass *GenoOperators::getRandomNeuroClassWithOutputAndWantingNoOrAnyInputs(Model::ShapeType for_shape_type)
{
        vector<NeuroClass *> active;
        for (int i = 0; i < Neuro::getClassCount(); i++)
        {
                NeuroClass *nc = Neuro::getClass(i);
                if (nc->isShapeTypeSupported(for_shape_type) && nc->genactive && nc->getPreferredOutput() != 0 && nc->getPreferredInputs() <= 0) // getPreferredInputs() should be 0 or -1 (any)
                        active.push_back(nc);
        }
        if (active.size() == 0) return NULL; else return active[rndUint(active.size())];
}

int GenoOperators::getRandomNeuroClassWithOutput(const vector<NeuroClass *> &NClist)
{
        vector<int> allowed;
        for (size_t i = 0; i < NClist.size(); i++)
                if (NClist[i]->getPreferredOutput() != 0) //this NeuroClass provides output
                        allowed.push_back(i);
        if (allowed.size() == 0) return -1; else return allowed[rndUint(allowed.size())];
}

int GenoOperators::getRandomNeuroClassWithInput(const vector<NeuroClass *> &NClist)
{
        vector<int> allowed;
        for (size_t i = 0; i < NClist.size(); i++)
                if (NClist[i]->getPreferredInputs() != 0) //this NeuroClass wants one input connection or more
                        allowed.push_back(i);
        if (allowed.size() == 0) return -1; else return allowed[rndUint(allowed.size())];
}

NeuroClass *GenoOperators::parseNeuroClass(char *&s, ModelEnum::ShapeType for_shape_type)
{
        int maxlen = (int)strlen(s);
        int NClen = 0;
        NeuroClass *NC = NULL;
        for (int i = 0; i < Neuro::getClassCount(); i++)
        {
                NeuroClass *nci = Neuro::getClass(i);
                if (!nci->isShapeTypeSupported(for_shape_type))
                        continue;
                const char *nciname = nci->name.c_str();
                int ncinamelen = (int)strlen(nciname);
                if (maxlen >= ncinamelen && ncinamelen > NClen && (strncmp(s, nciname, ncinamelen) == 0))
                {
                        NC = nci;
                        NClen = ncinamelen;
                }
        }
        s += NClen;
        return NC;
}
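// Illustrative example (editorial, with hypothetical class names): if both "N" and "Nn" were active
// neuron classes for the given shape type and s pointed at "Nn:...", the longer match "Nn" would win,
// NC would be set to that class, and s would be advanced by 2 characters; when nothing matches,
// s is left untouched and NULL is returned.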

Neuro *GenoOperators::findNeuro(const Model *m, const NeuroClass *nc)
{
        if (!m) return NULL;
        for (int i = 0; i < m->getNeuroCount(); i++)
                if (m->getNeuro(i)->getClass() == nc) return m->getNeuro(i);
        return NULL; //neuron of class 'nc' was not found
}

int GenoOperators::neuroClassProp(char *&s, NeuroClass *nc, bool also_v1_N_props)
{
        int len = (int)strlen(s);
        int Len = 0, I = -1;
        if (nc)
        {
                Param p = nc->getProperties();
                for (int i = 0; i < p.getPropCount(); i++)
                {
                        const char *n = p.id(i);
                        int l = (int)strlen(n);
                        if (len >= l && l > Len && (strncmp(s, n, l) == 0)) { I = NEUROCLASS_PROP_OFFSET + i; Len = l; }
                        if (also_v1_N_props) //recognize old symbols of properties:  /=!
                        {
                                if (strcmp(n, "si") == 0) n = "/"; else
                                        if (strcmp(n, "in") == 0) n = "="; else
                                                if (strcmp(n, "fo") == 0) n = "!";
                                l = (int)strlen(n);
                                if (len >= l && l > Len && (strncmp(s, n, l) == 0)) { I = NEUROCLASS_PROP_OFFSET + i; Len = l; }
                        }
                }
        }
        Neuro n;
        Param p = n.extraProperties();
        for (int i = 0; i < p.getPropCount(); i++)
        {
                const char *n = p.id(i);
                int l = (int)strlen(n);
                if (len >= l && l > Len && (strncmp(s, n, l) == 0)) { I = i; Len = l; }
        }
        s += Len;
        return I;
}

bool GenoOperators::canStartNeuroClassName(const char firstchar)
{
        return isupper(firstchar) || firstchar == '|' || firstchar == '@' || firstchar == '*';
}

bool GenoOperators::isWS(const char c)
{
        return c == ' ' || c == '\n' || c == '\t' || c == '\r';
}

void GenoOperators::skipWS(char *&s)
{
        if (s == NULL)
                logMessage("GenoOperators", "skipWS", LOG_WARN, "NULL reference!");
        else
                while (isWS(*s)) s++;
}

bool GenoOperators::areAlike(char *g1, char *g2)
{
        while (*g1 || *g2)
        {
                skipWS(g1);
                skipWS(g2);
                if (*g1 != *g2) return false; //when difference
                if (!*g1 && !*g2) break; //both end
                g1++;
                g2++;
        }
        return true; //equal
}

char *GenoOperators::strchrn0(const char *str, char ch)
{
        return ch == 0 ? NULL : strchr((char *)str, ch);
}

int GenoOperators::getRandomChar(const char *choices, const char *excluded)
{
        int allowed_count = 0;
        for (size_t i = 0; i < strlen(choices); i++) if (!strchrn0(excluded, choices[i])) allowed_count++;
        if (allowed_count == 0) return -1; //no char is allowed
        int rnd_index = rndUint(allowed_count) + 1;
        allowed_count = 0;
        for (size_t i = 0; i < strlen(choices); i++)
        {
                if (!strchrn0(excluded, choices[i])) allowed_count++;
                if (allowed_count == rnd_index) return int(i);
        }
        return -1; //never happens
}
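// Illustrative example (editorial): getRandomChar("XYZ", "Y") returns 0 or 2 (an index into "choices",
// never the excluded 'Y' at index 1) with equal probability, and -1 when every character in
// "choices" is also listed in "excluded".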

//#include <cassert>
string GenoOperators::simplifiedModifiersFixedOrder(const char *str_of_char_pairs, vector<int> &char_counts)
{
//      assert(strlen(str_of_char_pairs) == char_counts.size());
//      assert(char_counts.size() % 2 == 0);
        const int MAX_NUMBER_SAME_TYPE = 8; // max. number of modifiers of each type (case-sensitive) - mainly for rR, even though for rR, 4 would be sufficient if we assume lower or upper can be chosen as required for minimal length, e.g. rrrrr==RRR, RRRRRR==rr
        string simplified;
        //#define CLUMP_IDENTICAL_MODIFIERS //not good because, with the exception of rR, properties are calculated incrementally and non-linearly, and their values are updated after each modifier character, so these values may for example saturate after a large number of identical modifier symbols. The order of modifiers is (with the exception of rR) relevant and extreme values of properties increase this relevance, so better keep the modifiers dispersed.
#ifdef CLUMP_IDENTICAL_MODIFIERS
        for (size_t i = 0; i < strlen(str_of_char_pairs); i++)
                if ((i % 2) == 0) //only even index "i" in str_of_char_pairs
                        for (int j = 0; j < std::min(MAX_NUMBER_SAME_TYPE, abs(char_counts[i] - char_counts[i + 1])); j++) //assume that an even-index char and the following odd-index char have the opposite influence, so they cancel out.
                                simplified += str_of_char_pairs[i + (char_counts[i + 1] > char_counts[i])]; //inner loop adds a sequence of same chars such as rrrrr or QQQ
#else
        for (size_t i = 0; i < strlen(str_of_char_pairs); i++)
                if ((i % 2) == 0) //only even index "i" in str_of_char_pairs
                {
                        char_counts[i] -= char_counts[i + 1]; //from now on, even items in the vector store the difference between antagonistic modifier symbols; odd items are not needed
                        char_counts[i] = std::min(std::max(char_counts[i], -MAX_NUMBER_SAME_TYPE), MAX_NUMBER_SAME_TYPE);
                }
        int remaining;
        do {
                remaining = 0;
                for (size_t i = 0; i < strlen(str_of_char_pairs); i++)
                        if ((i % 2) == 0) //only even index "i" in str_of_char_pairs
                                if (char_counts[i] != 0)
                                {
                                        simplified += str_of_char_pairs[i + (char_counts[i] < 0)];
                                        char_counts[i] += char_counts[i] > 0 ? -1 : +1; //decrease the difference towards zero
                                        remaining += abs(char_counts[i]);
                                }
        } while (remaining > 0);
#endif
        return simplified;
}
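// Worked example (editorial): for str_of_char_pairs "LlWw" with char_counts {3,1,0,2}
// (i.e. three 'L', one 'l', no 'W', two 'w'), the differences become +2 for the L pair and -2 for
// the W pair, and the round-robin loop emits "LwLw" - antagonistic pairs cancel out, the differences
// are capped at MAX_NUMBER_SAME_TYPE, and the surviving modifiers are kept dispersed rather than clumped.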

string GenoOperators::simplifiedModifiers(const string & original)
{
        const int MAX_NUMBER_SAME_TYPE = 6; // max. number of modifiers of each type (case-insensitive). rR could be treated separately in simplification because their influence follows different (i.e., simple additive) logic - so the simplifiedModifiersFixedOrder() logic with cancelling out is appropriate for rR. However, in this function, making no exception for rR does not cause any harm to these modifiers either - the only consequence is that we will not remove antagonistic letters and will not simplify sequences of rR longer than 4, while they could be simplified (e.g. rrrrr==RRR, RRRRRR==rr).
        int counter[256] = {}; //initialize with zeros; 256 is unnecessarily big and redundant, but enables very fast access (indexed directly by the ASCII code)
        string simplified = "";
        for (int i = original.size() - 1; i >= 0; i--) //iterate from end to begin - easier to remove "oldest" = first modifiers
        {
                unsigned char c = original[i];
                if (!std::isalpha(c))
                        continue;
                unsigned char lower = std::tolower(c);
                counter[lower]++;
                if (counter[lower] <= MAX_NUMBER_SAME_TYPE) //get rid of modifiers that are too numerous, but get rid of the first ones in the string (="oldest", the last ones looking from the end), because their influence on the parameter value is the smallest
                        simplified += c;
        }
        std::reverse(simplified.begin(), simplified.end()); //"simplified" was built in reverse order, so we need to restore the order that corresponds to "original"
        return simplified;
}
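// Illustrative example (editorial): with MAX_NUMBER_SAME_TYPE == 6, a sequence such as "lllllllL"
// (seven 'l' followed by one 'L') is trimmed to "lllllL" - the leftmost, "oldest" occurrences beyond
// the case-insensitive limit are dropped, antagonistic letters are deliberately not cancelled out,
// and any non-letter characters are removed from the returned string.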