Search (98 results, page 1 of 5)

  • Filter: type_ss:"x"
  1. Verwer, K.: Freiheit und Verantwortung bei Hans Jonas (2011) 0.55
    0.55427843 = Lucene ClassicSimilarity score for doc 973: 6 of 16 query clauses matched (coord 6/16). Each matching clause scores the terms "3a" and "2f" at 0.3058088, i.e. fieldWeight 1.1240361 (tf 1.4142135 for freq 2.0 × idf 8.478011 for docFreq 24 of 44218 docs × fieldNorm 0.09375) times queryWeight 0.27206317 (idf × queryNorm 0.032090448); two of the clauses are further scaled by inner coord factors of 1/3 and 1/2.
    
    Content
     Cf.: http://creativechoice.org/doc/HansJonas.pdf
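     The relevance value after each hit is a Lucene tf-idf score (ClassicSimilarity), as the breakdown above for the first hit shows. A minimal sketch in Python, assuming Lucene's classic formulas tf = sqrt(freq) and idf = 1 + ln(maxDocs/(docFreq+1)) (helper names are illustrative), reproduces the reported fieldWeight and per-clause score:

       import math

       # Classic tf-idf components as they appear in the score breakdown above.
       def tf(freq: float) -> float:
           return math.sqrt(freq)                              # 1.4142135 for freq = 2.0

       def idf(doc_freq: int, max_docs: int) -> float:
           return 1.0 + math.log(max_docs / (doc_freq + 1))    # 8.478011 for 24 of 44218 docs

       field_norm = 0.09375          # length normalisation of the matched field
       query_norm = 0.032090448      # query normalisation factor

       field_weight = tf(2.0) * idf(24, 44218) * field_norm    # ~1.1240361
       query_weight = idf(24, 44218) * query_norm              # ~0.27206317
       clause_score = field_weight * query_weight              # ~0.3058088 per matching clause
       print(round(field_weight, 7), round(clause_score, 7))

     The document score is then the sum of the matching clause scores multiplied by the coordination factor, here 6 of 16 clauses, i.e. 0.375.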
  2. Farazi, M.: Faceted lightweight ontologies : a formalization and some experiments (2010) 0.34
    
    Abstract
    While classifications are heavily used to categorize web content, the evolution of the web foresees a more formal structure - ontology - which can serve this purpose. Ontologies are core artifacts of the Semantic Web which enable machines to use inference rules to conduct automated reasoning on data. Lightweight ontologies bridge the gap between classifications and ontologies. A lightweight ontology (LO) is an ontology representing a backbone taxonomy where the concept of the child node is more specific than the concept of the parent node. Formal lightweight ontologies can be generated from their informal ones. The key applications of formal lightweight ontologies are document classification, semantic search, and data integration. However, these applications suffer from the following problems: the disambiguation accuracy of the state of the art NLP tools used in generating formal lightweight ontologies from their informal ones; the lack of background knowledge needed for the formal lightweight ontologies; and the limitation of ontology reuse. In this dissertation, we propose a novel solution to these problems in formal lightweight ontologies; namely, faceted lightweight ontology (FLO). FLO is a lightweight ontology in which terms, present in each node label, and their concepts, are available in the background knowledge (BK), which is organized as a set of facets. A facet can be defined as a distinctive property of the groups of concepts that can help in differentiating one group from another. Background knowledge can be defined as a subset of a knowledge base, such as WordNet, and often represents a specific domain.
    Content
     PhD Dissertation at the International Doctorate School in Information and Communication Technology. Cf.: https://core.ac.uk/download/pdf/150083013.pdf
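     The backbone taxonomy and faceted background knowledge described in the abstract above can be pictured with a small sketch; the labels, facets, and class layout below are illustrative assumptions, not the formalization used in the thesis:

       from dataclasses import dataclass, field
       from typing import Dict, List

       @dataclass
       class Node:
           """Node of a backbone taxonomy; each child is more specific than its parent."""
           label: str
           children: List["Node"] = field(default_factory=list)

       # Lightweight ontology: a backbone taxonomy of increasingly specific concepts.
       taxonomy = Node("publications", children=[
           Node("journal articles", children=[Node("open-access journal articles")]),
           Node("theses"),
       ])

       # Background knowledge organised as facets: each facet groups related concepts
       # and helps distinguish one group of node-label terms from another.
       background_knowledge: Dict[str, List[str]] = {
           "document type": ["publication", "journal article", "thesis"],
           "access": ["open access", "restricted access"],
       }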
  3. Xiong, C.: Knowledge based text representations for information retrieval (2016) 0.25
    
    Content
     Submitted in partial fulfillment of the requirements for the degree of Doctor of Philosophy in Language and Information Technologies. Cf.: https://www.cs.cmu.edu/~cx/papers/knowledge_based_text_representation.pdf
  4. Shala, E.: Die Autonomie des Menschen und der Maschine : gegenwärtige Definitionen von Autonomie zwischen philosophischem Hintergrund und technologischer Umsetzbarkeit (2014) 0.23
    
    Footnote
     Cf.: https://www.researchgate.net/publication/271200105_Die_Autonomie_des_Menschen_und_der_Maschine_-_gegenwartige_Definitionen_von_Autonomie_zwischen_philosophischem_Hintergrund_und_technologischer_Umsetzbarkeit_Redigierte_Version_der_Magisterarbeit_Karls
  5. Piros, A.: Az ETO-jelzetek automatikus interpretálásának és elemzésének kérdései (2018) 0.23
    
    Content
     See also: New automatic interpreter for complex UDC numbers. At: <https://udcc.org/files/AttilaPiros_EC_36-37_2014-2015.pdf>
  6. Gabler, S.: Vergabe von DDC-Sachgruppen mittels eines Schlagwort-Thesaurus (2021) 0.23
    
    Content
     Master's thesis, Master of Science (Library and Information Studies) (MSc), Universität Wien. Advisor: Christoph Steiner. Cf.: https://www.researchgate.net/publication/371680244_Vergabe_von_DDC-Sachgruppen_mittels_eines_Schlagwort-Thesaurus. DOI: 10.25365/thesis.70030. See also the accompanying presentation at: https://wiki.dnb.de/download/attachments/252121510/DA3%20Workshop-Gabler.pdf?version=1&modificationDate=1671093170000&api=v2
  7. Huo, W.: Automatic multi-word term extraction and its application to Web-page summarization (2012) 0.20
    
    Content
     A thesis presented to the University of Guelph in partial fulfilment of the requirements for the degree of Master of Science in Computer Science. Cf.: http://www.inf.ufrgs.br/~ceramisch/download_files/publications/2009/p01.pdf
    Date
    10. 1.2013 19:22:47
  8. Stojanovic, N.: Ontology-based Information Retrieval : methods and tools for cooperative query answering (2005) 0.18
    
    Content
     Cf.: http://digbib.ubka.uni-karlsruhe.de/volltexte/documents/1627
  9. Richter, S.: Die formale Beschreibung von Dokumenten in Archiven und Bibliotheken : Perspektiven des Datenaustauschs (2004) 0.08
    
    Abstract
     Searching for data and accessing information have become easier in recent years through services on the Internet. While the library field has gathered decades of experience with data exchange through cooperative cataloguing, cooperative data management between archives has only begun in recent years. This thesis examines to what extent data from archives and libraries can be offered in shared data pools: Are the contents of the various data categories similar enough to be combined? Which standards underlie the data? To answer these questions, the various rule sets of the archival and library fields for archival description and bibliographic description are examined first, followed by the exchange formats built on them. The following (rule) works are included in the analysis: Papritz: Die archivische Titelaufnahme bei Sachakten; the Ordnungs- und Verzeichnungsgrundsätze für die staatlichen Archive der Deutschen Demokratischen Republik (OVG-DDR); the Internationale Grundsätze für die archivische Verzeichnung (ISAD(G)); the Handbuch für Wirtschaftsarchive; Praktische Archivkunde; the Regeln für die alphabetische Katalogisierung in wissenschaftlichen Bibliotheken (RAK-WB); the Anglo-American Cataloguing Rules (AACR); the General International Standard Bibliographic Description (ISBD(G)); and, for the description of personal papers as an interface between archives and libraries, the Ordnungs- und Verzeichnungsgrundsätze [des Goethe- und Schiller-Archivs] (OVG-GSA), König: Verwaltung und wissenschaftliche Erschließung von Nachlässen in Literaturarchiven, and the Regeln zur Erschließung von Nachlässen und Autographen (RNA). Of the data exchange formats, Encoded Archival Description (EAD), Maschinelles Austauschformat für Bibliotheken (MAB), and Machine Readable Cataloguing (MARC) are presented. The analysis shows that data from archives and libraries can be made available in a common data set in order to make them usable for cross-domain searching. It must be conceded, however, that the exchange format cannot use identical category numbers for similar description elements, because the contents of those categories differ too strongly. For the same reason the MAB format cannot simply be used for archival elements: either the existing MAB schema would have to be adapted to the needs of the archival field, or a new exchange format would have to be created, since the international EAD format likewise cannot be mapped onto the German descriptive tradition without changes. In particular, a deeper discussion of binding rule sets and exchange formats is recommended both within and beyond the archival and library communities.
  10. Gläser, C.: Elektronischer Auskunftsdienst im Echtzeitbetrieb : Chatangebote in anglo-amerikanischen Bibliotheken - Möglichkeiten der Übertragbarkeit auf deutsche Bibliotheken (2002) 0.01
    
  11. Kara, S.: An ontology-based retrieval system using semantic indexing (2012) 0.01
    
    Abstract
     In this thesis, we present an ontology-based information extraction and retrieval system and its application to the soccer domain. In general, we deal with three issues in semantic search, namely, usability, scalability and retrieval performance. We propose a keyword-based semantic retrieval approach. The performance of the system is improved considerably using domain-specific information extraction, inference and rules. Scalability is achieved by adapting a semantic indexing approach. The system is implemented using state-of-the-art technologies in the Semantic Web and its performance is evaluated against traditional systems as well as query expansion methods. Furthermore, a detailed evaluation is provided to observe the performance gain due to domain-specific information extraction and inference. Finally, we show how we use semantic indexing to solve simple structural ambiguities.
  12. Walz, J.: Analyse der Übertragbarkeit allgemeiner Rankingfaktoren von Web-Suchmaschinen auf Discovery-Systeme (2018) 0.01
    
    Abstract
     Objective: The aim of this bachelor's thesis was to analyse the transferability of the general ranking factors used by web search engines to discovery systems. This could improve library ranking, which so far has been based mainly on textual matching between the query and the documents. Method: To this end, factors from the groups popularity, recency, locality, technical factors, and personalised ranking were discussed. The ranking factors were selected according to how frequently they occur in the analysed literature and the importance derived from that. Result: Of the 23 ranking factors examined, 14 (61%) are directly transferable from web search engine ranking to discovery system ranking. These include, among others, click behaviour, creation date, user location, and language. Six (26%) of the factors examined, by contrast, are not transferable (e.g. update frequency and page loading speed). Link topology, usage frequency, and update frequency are transferable with appropriate modifications.
    Content
     Cf.: https://publiscologne.th-koeln.de/frontdoor/index/index/searchtype/authorsearch/author/Julia+Walz/docId/1169/start/0/rows/10.
  13. Mao, M.: Ontology mapping : towards semantic interoperability in distributed and heterogeneous environments (2008) 0.01
    
    Abstract
    This dissertation studies ontology mapping: the problem of finding semantic correspondences between similar elements of different ontologies. In the dissertation, elements denote classes or properties of ontologies. The goal of this research is to use ontology mapping to make heterogeneous information more accessible. The World Wide Web (WWW) now is widely used as a universal medium for information exchange. Semantic interoperability among different information systems in the WWW is limited due to information heterogeneity, and the non semantic nature of HTML and URLs. Ontologies have been suggested as a way to solve the problem of information heterogeneity by providing formal, explicit definitions of data and reasoning ability over related concepts. Given that no universal ontology exists for the WWW, work has focused on finding semantic correspondences between similar elements of different ontologies, i.e., ontology mapping. Ontology mapping can be done either by hand or using automated tools. Manual mapping becomes impractical as the size and complexity of ontologies increases. Full or semi-automated mapping approaches have been examined by several research studies. Previous full or semiautomated mapping approaches include analyzing linguistic information of elements in ontologies, treating ontologies as structural graphs, applying heuristic rules and machine learning techniques, and using probabilistic and reasoning methods etc. In this paper, two generic ontology mapping approaches are proposed. One is the PRIOR+ approach, which utilizes both information retrieval and artificial intelligence techniques in the context of ontology mapping. The other is the non-instance learning based approach, which experimentally explores machine learning algorithms to solve ontology mapping problem without requesting any instance. The results of the PRIOR+ on different tests at OAEI ontology matching campaign 2007 are encouraging. The non-instance learning based approach has shown potential for solving ontology mapping problem on OAEI benchmark tests.
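     As a rough illustration of the mapping task described above (finding correspondences between similar elements of two ontologies), the sketch below uses a plain lexical-similarity heuristic; it is not the PRIOR+ approach or the non-instance learning approach from the dissertation, and the labels and threshold are illustrative assumptions:

       from difflib import SequenceMatcher
       from itertools import product

       # Element labels (classes or properties) of two toy ontologies -- illustrative only.
       ontology_a = ["Person", "Publication", "author"]
       ontology_b = ["person", "publications", "authorName"]

       def similarity(a: str, b: str) -> float:
           """Simple lexical similarity between two element labels."""
           return SequenceMatcher(None, a.lower(), b.lower()).ratio()

       # Keep candidate correspondences whose similarity exceeds a threshold.
       mappings = [(a, b, round(similarity(a, b), 2))
                   for a, b in product(ontology_a, ontology_b)
                   if similarity(a, b) >= 0.7]
       print(mappings)  # [('Person', 'person', 1.0), ('Publication', 'publications', 0.96), ('author', 'authorName', 0.75)]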
  14. Höllstin, A.: Bibliotheks- und Informationskompetenz (Bibliographic Instruction und Information Literacy) : Fallstudie über eine amerikanische Universitätsbibliothek basierend auf theoretischen Grundlagen und praktischen Anleitungen (Workbooks) (1997) 0.01
    
    Abstract
     The presentation of the theoretical foundations leading to structured, well-thought-out user instruction is based mainly on Anglo-American literature, with a focus on the situation at university libraries. The practical planning, development, and organisation of the user instruction programme of New Mexico State University (NMSU) in Alamogordo, based on the workbook teaching method, is described. The instruction offerings are also evaluated as to how far they fulfil the criteria described in the literature, again on the basis of the library's Library Skills Workbook. The thesis closes with an outlook on recent trends in user instruction in the USA and on the applicability of the theoretical foundations in Germany.
  15. Werther, S.: Bei Anruf Film : Strukturen, Methoden und Chancen produktionsintegrierter Filmrecherche im internationalen Vergleich (1997) 0.01
    
    Abstract
     In an exploratory study, the working conditions, structures, and methods of English and U.S. American freelance film researchers are described, and against this background the possibilities of employing freelance film researchers within German film and television production are assessed. The first part of the study addresses the need for production-integrated film and television research and introduces the typical tasks and areas of responsibility of film researchers. On the basis of guided, standardised in-depth interviews with 8 English and 8 American film researchers as well as research staff in 3 German television editorial departments, the study first describes the political and legal, media-economic, and source-related conditions under which the Anglo-American film researchers work. Their qualifications and prerequisites are characterised comparatively, their tasks and fields of work are explained, the legal foundations of their employment are clarified, and the particular working methods and tools of film researchers are presented. Similarities and differences between production-integrated film research in the USA and in Great Britain are contrasted with the situation of research staff in German editorial departments. Finally, the study demonstrates the organisational, content-related, and economic advantages that can be achieved by employing freelance film researchers in German broadcasters and television production companies.
  16. Karlova-Bourbonus, N.: Automatic detection of contradictions in texts (2018) 0.01
    
    Abstract
     Natural language contradictions are of complex nature. As will be shown in Chapter 5, the realization of contradictions is not limited to examples such as Socrates is a man and Socrates is not a man (under the condition that Socrates refers to the same object in the real world), which is discussed by Aristotle (Section 3.1.1). Empirical evidence (see Chapter 5 for more details) shows that only a few contradictions occurring in real life are of that explicit (prototypical) kind. Rather, contradictions make use of a variety of natural language devices such as, e.g., paraphrasing, synonyms and antonyms, passive and active voice, diversity of negation expression, and figurative linguistic means such as idioms, irony, and metaphors. Additionally, the most sophisticated kind of contradictions, the so-called implicit contradictions, can be found only when applying world knowledge and after conducting a sequence of logical operations such as, e.g., in: (1.1) The first prize was given to the experienced grandmaster L. Stein who, in total, collected ten points (7 wins and 3 draws). Those familiar with the chess rules know that a chess player gets one point for winning and zero points for losing the game. In case of a draw, each player gets a half point. Built on this idea and by conducting some simple mathematical operations, we can infer that in the case of 7 wins and 3 draws (the second part of the sentence), a player can only collect 8.5 points and not 10 points. Hence, we observe that there is a contradiction between the first and the second parts of the sentence.
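     The inference in example (1.1) rests on simple world-knowledge arithmetic: one point per win, half a point per draw. A minimal sketch (the helper below is illustrative, not code from the thesis) makes the check explicit:

       # Chess scoring: 1 point per win, 0.5 per draw, 0 per loss.
       def implied_points(wins: int, draws: int, losses: int = 0) -> float:
           return wins * 1.0 + draws * 0.5 + losses * 0.0

       claimed_points = 10.0
       computed_points = implied_points(wins=7, draws=3)      # 8.5
       is_contradiction = computed_points != claimed_points   # True: 8.5 points, not 10
       print(computed_points, is_contradiction)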
  17. Eppendahl, F.: Entwurf eines Konzepts für die elektronische Dokumentenverwaltung von Verträgen (1989) 0.00
    
    Date
    9.12.1995 17:26:17
  18. Herr, K.: Entwicklung eines Thesaurus Medienpädagogik (1989) 0.00
    
    Date
    9.12.1995 17:26:17
  19. Maag, D.: Leistungsmerkmale von Volltextretrievalsystemen : Anforderungen an die künftige Weiterentwicklung (1989) 0.00
    
    Date
    9.12.1995 17:26:17
  20. Ritzler, C.: Vergleichende Untersuchung von PC-Thesaurusprogrammen (1989) 0.00
    
    Date
    9.12.1995 17:26:17


Languages

  • d 85
  • e 12
  • hu 1
