Search (31408 results, page 2 of 1571)

  1. Popper, K.R.: Three worlds : the Tanner lecture on human values. Delivered at the University of Michigan, April 7, 1978 (1978) 0.26
    0.25719404 = product of:
      0.82302094 = sum of:
        0.063309304 = product of:
          0.1899279 = sum of:
            0.1899279 = weight(_text_:3a in 230) [ClassicSimilarity], result of:
              0.1899279 = score(doc=230,freq=2.0), product of:
                0.25345436 = queryWeight, product of:
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.029895496 = queryNorm
                0.7493574 = fieldWeight in 230, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.0625 = fieldNorm(doc=230)
          0.33333334 = coord(1/3)
        0.1899279 = weight(_text_:2f in 230) [ClassicSimilarity], result of:
          0.1899279 = score(doc=230,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.7493574 = fieldWeight in 230, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0625 = fieldNorm(doc=230)
        0.1899279 = weight(_text_:2f in 230) [ClassicSimilarity], result of:
          0.1899279 = score(doc=230,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.7493574 = fieldWeight in 230, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0625 = fieldNorm(doc=230)
        0.1899279 = weight(_text_:2f in 230) [ClassicSimilarity], result of:
          0.1899279 = score(doc=230,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.7493574 = fieldWeight in 230, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0625 = fieldNorm(doc=230)
        0.1899279 = weight(_text_:2f in 230) [ClassicSimilarity], result of:
          0.1899279 = score(doc=230,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.7493574 = fieldWeight in 230, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0625 = fieldNorm(doc=230)
      0.3125 = coord(5/16)
    
    Source
    https://tannerlectures.utah.edu/_documents/a-to-z/p/popper80.pdf
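    The indented breakdowns shown with each result are Lucene's ClassicSimilarity (TF-IDF) explain output. As a minimal sketch, assuming the standard Lucene formulas behind the displayed factors (this is not the search engine's own code), the score of result 1 can be recomputed from the numbers shown above:
      from math import sqrt

      # Factors copied from the explain tree for result 1 (doc 230).
      idf = 8.478011            # idf(docFreq=24, maxDocs=44218)
      query_norm = 0.029895496  # queryNorm
      field_norm = 0.0625       # fieldNorm(doc=230)
      tf = sqrt(2.0)            # tf(freq=2.0) = 1.4142135

      query_weight = idf * query_norm              # 0.25345436 (queryWeight)
      field_weight = tf * idf * field_norm         # 0.7493574  (fieldWeight)
      clause_weight = query_weight * field_weight  # 0.1899279  (weight of one matching clause)

      # The first clause is itself a nested sum matching 1 of 3 sub-clauses -> coord(1/3).
      nested = clause_weight * (1.0 / 3.0)         # 0.063309304
      raw_sum = nested + 4 * clause_weight         # 0.82302094 (sum of the 5 listed clauses)
      score = raw_sum * (5.0 / 16.0)               # coord(5/16): 5 of 16 query clauses matched
      print(score)                                 # ~0.25719404, the value displayed for result 1
    The same composition (per-clause weights summed, then multiplied by the coord factor) accounts for the scores of the other results on this page.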
  2. Thissen, F.: Screen-Design-Manual : Communicating Effectively Through Multimedia (2003) 0.26
    0.25711033 = product of:
      0.51422065 = sum of:
        0.085994825 = weight(_text_:informatik in 1397) [ClassicSimilarity], result of:
          0.085994825 = score(doc=1397,freq=8.0), product of:
            0.15254098 = queryWeight, product of:
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.029895496 = queryNorm
            0.563749 = fieldWeight in 1397, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1397)
        0.14679115 = weight(_text_:monographien in 1397) [ClassicSimilarity], result of:
          0.14679115 = score(doc=1397,freq=8.0), product of:
            0.1992968 = queryWeight, product of:
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.029895496 = queryNorm
            0.73654544 = fieldWeight in 1397, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1397)
        0.08090937 = weight(_text_:einzelne in 1397) [ClassicSimilarity], result of:
          0.08090937 = score(doc=1397,freq=4.0), product of:
            0.17595729 = queryWeight, product of:
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.029895496 = queryNorm
            0.4598239 = fieldWeight in 1397, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1397)
        0.0728053 = weight(_text_:anwendungen in 1397) [ClassicSimilarity], result of:
          0.0728053 = score(doc=1397,freq=4.0), product of:
            0.16691269 = queryWeight, product of:
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.029895496 = queryNorm
            0.43618792 = fieldWeight in 1397, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1397)
        0.011653905 = weight(_text_:der in 1397) [ClassicSimilarity], result of:
          0.011653905 = score(doc=1397,freq=4.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.17451303 = fieldWeight in 1397, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1397)
        0.09347347 = weight(_text_:datenverarbeitung in 1397) [ClassicSimilarity], result of:
          0.09347347 = score(doc=1397,freq=4.0), product of:
            0.18912636 = queryWeight, product of:
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.029895496 = queryNorm
            0.4942382 = fieldWeight in 1397, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1397)
        0.012466575 = weight(_text_:information in 1397) [ClassicSimilarity], result of:
          0.012466575 = score(doc=1397,freq=12.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.23754507 = fieldWeight in 1397, product of:
              3.4641016 = tf(freq=12.0), with freq of:
                12.0 = termFreq=12.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1397)
        0.010126068 = product of:
          0.020252137 = sum of:
            0.020252137 = weight(_text_:22 in 1397) [ClassicSimilarity], result of:
              0.020252137 = score(doc=1397,freq=2.0), product of:
                0.104688935 = queryWeight, product of:
                  3.5018296 = idf(docFreq=3622, maxDocs=44218)
                  0.029895496 = queryNorm
                0.19345059 = fieldWeight in 1397, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  3.5018296 = idf(docFreq=3622, maxDocs=44218)
                  0.0390625 = fieldNorm(doc=1397)
          0.5 = coord(1/2)
      0.5 = coord(8/16)
    
    Abstract
    The "Screen Design Manual" provides designers of interactive media with a practical working guide for preparing and presenting information that is suitable for both their target groups and the media they are using. It describes background information and relationships, clarifies them with the help of examples, and encourages further development of the language of digital media. In addition to the basics of the psychology of perception and learning, ergonomics, communication theory, imagery research, and aesthetics, the book also explores the design of navigation and orientation elements. Guidelines and checklists, along with the unique presentation of the book, support the application of information in practice.
    Classification
    ST 325 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Multimedia
    ST 253 Informatik / Monographien / Software und -entwicklung / Web-Programmierwerkzeuge (A-Z)
    Content
    From the contents: Basics of screen design.- Navigation and orientation.- Information.- Screen layout.- Interaction.- Motivation.- Innovative prospects.- Appendix.- Glossary.- Literature.- Index
    Date
    22. 3.2008 14:29:25
    LCSH
    Information display systems / Formatting
    RVK
    ST 325 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Multimedia
    ST 253 Informatik / Monographien / Software und -entwicklung / Web-Programmierwerkzeuge (A-Z)
    Subject
    Information display systems / Formatting
  3. Klimsa, P.: Multimedia: Anwendungen Tools und Techniken : mit einem Beitrag von Nicola Döring (1995) 0.25
    0.24972844 = product of:
      0.5708079 = sum of:
        0.072969034 = weight(_text_:informatik in 711) [ClassicSimilarity], result of:
          0.072969034 = score(doc=711,freq=4.0), product of:
            0.15254098 = queryWeight, product of:
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.029895496 = queryNorm
            0.4783569 = fieldWeight in 711, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.046875 = fieldNorm(doc=711)
        0.12455644 = weight(_text_:monographien in 711) [ClassicSimilarity], result of:
          0.12455644 = score(doc=711,freq=4.0), product of:
            0.1992968 = queryWeight, product of:
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.029895496 = queryNorm
            0.6249796 = fieldWeight in 711, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.046875 = fieldNorm(doc=711)
        0.09709124 = weight(_text_:einzelne in 711) [ClassicSimilarity], result of:
          0.09709124 = score(doc=711,freq=4.0), product of:
            0.17595729 = queryWeight, product of:
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.029895496 = queryNorm
            0.5517887 = fieldWeight in 711, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.046875 = fieldNorm(doc=711)
        0.13813834 = weight(_text_:anwendungen in 711) [ClassicSimilarity], result of:
          0.13813834 = score(doc=711,freq=10.0), product of:
            0.16691269 = queryWeight, product of:
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.029895496 = queryNorm
            0.82760835 = fieldWeight in 711, product of:
              3.1622777 = tf(freq=10.0), with freq of:
                10.0 = termFreq=10.0
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.046875 = fieldNorm(doc=711)
        0.019777333 = weight(_text_:der in 711) [ClassicSimilarity], result of:
          0.019777333 = score(doc=711,freq=8.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.29615843 = fieldWeight in 711, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.046875 = fieldNorm(doc=711)
        0.11216817 = weight(_text_:datenverarbeitung in 711) [ClassicSimilarity], result of:
          0.11216817 = score(doc=711,freq=4.0), product of:
            0.18912636 = queryWeight, product of:
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.029895496 = queryNorm
            0.5930859 = fieldWeight in 711, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.046875 = fieldNorm(doc=711)
        0.00610735 = weight(_text_:information in 711) [ClassicSimilarity], result of:
          0.00610735 = score(doc=711,freq=2.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.116372846 = fieldWeight in 711, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.046875 = fieldNorm(doc=711)
      0.4375 = coord(7/16)
    
    Abstract
    Multimedia, the integration and interactive use of text, images, moving images, and sound, will largely determine the future of information, learning, and entertainment. One does not have to be a computing expert to understand why multimedia is an important technology, how multimedia works, and what it offers personally. This book eases the practical entry into the world of multimedia. Illustrated with numerous pictorial examples and graphics, it vividly describes which multimedia applications are already widespread today, which specific applications and software tools one needs to handle in order to create multimedia applications, and on which technical foundations and standards multimedia is based. It also describes how multimedia is gaining ground in the worldwide data networks and, not least, how multimedia is radically changing our world of work and information.
    Classification
    ST 325 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Multimedia
    RVK
    ST 325 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Multimedia
  4. Kuhlen, R.: Hypertext : ein nichtlineares Medium zwischen Buch und Wissensbank (1991) 0.25
    0.24762802 = product of:
      0.5660069 = sum of:
        0.1031938 = weight(_text_:informatik in 343) [ClassicSimilarity], result of:
          0.1031938 = score(doc=343,freq=18.0), product of:
            0.15254098 = queryWeight, product of:
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.029895496 = queryNorm
            0.67649883 = fieldWeight in 343, product of:
              4.2426405 = tf(freq=18.0), with freq of:
                18.0 = termFreq=18.0
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.03125 = fieldNorm(doc=343)
        0.14382538 = weight(_text_:monographien in 343) [ClassicSimilarity], result of:
          0.14382538 = score(doc=343,freq=12.0), product of:
            0.1992968 = queryWeight, product of:
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.029895496 = queryNorm
            0.72166425 = fieldWeight in 343, product of:
              3.4641016 = tf(freq=12.0), with freq of:
                12.0 = termFreq=12.0
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.03125 = fieldNorm(doc=343)
        0.12945499 = weight(_text_:einzelne in 343) [ClassicSimilarity], result of:
          0.12945499 = score(doc=343,freq=16.0), product of:
            0.17595729 = queryWeight, product of:
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.029895496 = queryNorm
            0.73571825 = fieldWeight in 343, product of:
              4.0 = tf(freq=16.0), with freq of:
                16.0 = termFreq=16.0
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.03125 = fieldNorm(doc=343)
        0.058244236 = weight(_text_:anwendungen in 343) [ClassicSimilarity], result of:
          0.058244236 = score(doc=343,freq=4.0), product of:
            0.16691269 = queryWeight, product of:
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.029895496 = queryNorm
            0.34895033 = fieldWeight in 343, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.03125 = fieldNorm(doc=343)
        0.019777333 = weight(_text_:der in 343) [ClassicSimilarity], result of:
          0.019777333 = score(doc=343,freq=18.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.29615843 = fieldWeight in 343, product of:
              4.2426405 = tf(freq=18.0), with freq of:
                18.0 = termFreq=18.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.03125 = fieldNorm(doc=343)
        0.10575315 = weight(_text_:datenverarbeitung in 343) [ClassicSimilarity], result of:
          0.10575315 = score(doc=343,freq=8.0), product of:
            0.18912636 = queryWeight, product of:
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.029895496 = queryNorm
            0.55916667 = fieldWeight in 343, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.03125 = fieldNorm(doc=343)
        0.0057580643 = weight(_text_:information in 343) [ClassicSimilarity], result of:
          0.0057580643 = score(doc=343,freq=4.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.10971737 = fieldWeight in 343, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.03125 = fieldNorm(doc=343)
      0.4375 = coord(7/16)
    
    Abstract
    Hypertext, a new way of presenting information, has within a few years become a topic discussed at numerous conferences in the fields of computer science, information science, artificial intelligence, linguistics, psychology, and learning theory. The fascination this medium holds for research, development, and application alike rests on the fundamentally non-linear organization of hypertext units and on the user-friendly forms of equally non-linear, flexible access to those units in a hypertext system. This introduction to hypertext methodology treats the essential elements of hypertext systems - the information units and types of links as well as the hypertext-specific forms of navigation - in theoretical depth and illustrates them with existing commercial and experimental hypertext systems. Particular emphasis is placed on the relationship between hypertext and information retrieval and on the uses of hypertext in learning environments. On the theoretical side, the book examines whether hypertext users can obtain the information they currently need in critical situations more effectively than with other media, that is, whether and how an 'informational added value' is achieved compared with traditional linear forms. It also discusses the possibility of building hypertexts from texts automatically with the help of text analysis methods and knowledge representation techniques. The book contains a comprehensive bibliography and, in the appendix, a glossary and a structured description of the most important hypertext systems currently available or under development.
    BK
    54.82 / Textverarbeitung <Informatik>
    Classification
    ES 920 Allgemeine und vergleichende Sprach- und Literaturwissenschaft. Indogermanistik. Außereuropäische Sprachen und Literaturen / Spezialbereiche der allgemeinen Sprachwissenschaft / Datenverarbeitung und Sprachwissenschaft. Computerlinguistik / Formalisierte Sprachen
    ST 271 Informatik / Monographien / Software und -entwicklung / Datenbanken, Datenbanksysteme, Data base management, Informationssysteme / Einzelne Datenbanksprachen und Datenbanksysteme
    ST 281 Informatik / Monographien / Software und -entwicklung / Einzelne Benutzerschnittstellen (alphabet.)
    ST 351 H97 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Textverarbeitung, Desktop Publishing / Einzelne Programme (A-Z) / Programme H / Hypertext
    54.82 / Textverarbeitung <Informatik>
    RVK
    ES 920 Allgemeine und vergleichende Sprach- und Literaturwissenschaft. Indogermanistik. Außereuropäische Sprachen und Literaturen / Spezialbereiche der allgemeinen Sprachwissenschaft / Datenverarbeitung und Sprachwissenschaft. Computerlinguistik / Formalisierte Sprachen
    ST 271 Informatik / Monographien / Software und -entwicklung / Datenbanken, Datenbanksysteme, Data base management, Informationssysteme / Einzelne Datenbanksprachen und Datenbanksysteme
    ST 281 Informatik / Monographien / Software und -entwicklung / Einzelne Benutzerschnittstellen (alphabet.)
    ST 351 H97 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Textverarbeitung, Desktop Publishing / Einzelne Programme (A-Z) / Programme H / Hypertext
  5. Herb, U.; Beucke, D.: ¬Die Zukunft der Impact-Messung : Social Media, Nutzung und Zitate im World Wide Web (2013) 0.24
    0.24323684 = product of:
      0.77835786 = sum of:
        0.1899279 = weight(_text_:2f in 2188) [ClassicSimilarity], result of:
          0.1899279 = score(doc=2188,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.7493574 = fieldWeight in 2188, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0625 = fieldNorm(doc=2188)
        0.1899279 = weight(_text_:2f in 2188) [ClassicSimilarity], result of:
          0.1899279 = score(doc=2188,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.7493574 = fieldWeight in 2188, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0625 = fieldNorm(doc=2188)
        0.018646248 = weight(_text_:der in 2188) [ClassicSimilarity], result of:
          0.018646248 = score(doc=2188,freq=4.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.27922085 = fieldWeight in 2188, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.0625 = fieldNorm(doc=2188)
        0.1899279 = weight(_text_:2f in 2188) [ClassicSimilarity], result of:
          0.1899279 = score(doc=2188,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.7493574 = fieldWeight in 2188, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0625 = fieldNorm(doc=2188)
        0.1899279 = weight(_text_:2f in 2188) [ClassicSimilarity], result of:
          0.1899279 = score(doc=2188,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.7493574 = fieldWeight in 2188, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0625 = fieldNorm(doc=2188)
      0.3125 = coord(5/16)
    
    Abstract
    Scholarly careers and publications require reputation and as much attention as possible. Literature that attracts this attention is - so the common assumption goes - frequently cited. Based on this consideration, methods of citation measurement were developed that are supposed to provide information about the relevance or (as is often implicitly and explicitly postulated) even the quality of a publication or a researcher.
    Content
    See: https://www.leibniz-science20.de/forschung/projekte/altmetrics-in-verschiedenen-wissenschaftsdisziplinen/.
  6. Gabler, S.: Vergabe von DDC-Sachgruppen mittels eines Schlagwort-Thesaurus (2021) 0.24
    0.24068263 = product of:
      0.55013174 = sum of:
        0.039568312 = product of:
          0.11870494 = sum of:
            0.11870494 = weight(_text_:3a in 1000) [ClassicSimilarity], result of:
              0.11870494 = score(doc=1000,freq=2.0), product of:
                0.25345436 = queryWeight, product of:
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.029895496 = queryNorm
                0.46834838 = fieldWeight in 1000, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.0390625 = fieldNorm(doc=1000)
          0.33333334 = coord(1/3)
        0.11870494 = weight(_text_:2f in 1000) [ClassicSimilarity], result of:
          0.11870494 = score(doc=1000,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 1000, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1000)
        0.11870494 = weight(_text_:2f in 1000) [ClassicSimilarity], result of:
          0.11870494 = score(doc=1000,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 1000, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1000)
        0.028546121 = weight(_text_:der in 1000) [ClassicSimilarity], result of:
          0.028546121 = score(doc=1000,freq=24.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.42746788 = fieldWeight in 1000, product of:
              4.8989797 = tf(freq=24.0), with freq of:
                24.0 = termFreq=24.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1000)
        0.11870494 = weight(_text_:2f in 1000) [ClassicSimilarity], result of:
          0.11870494 = score(doc=1000,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 1000, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1000)
        0.11870494 = weight(_text_:2f in 1000) [ClassicSimilarity], result of:
          0.11870494 = score(doc=1000,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 1000, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1000)
        0.0071975808 = weight(_text_:information in 1000) [ClassicSimilarity], result of:
          0.0071975808 = score(doc=1000,freq=4.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.13714671 = fieldWeight in 1000, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1000)
      0.4375 = coord(7/16)
    
    Abstract
    The thesis presents the construction of a thematically ordered thesaurus based on the subject headings of the Integrated Authority File (GND), using the DDC notations contained in it. The top level of this thesaurus is formed by the DDC subject groups of the German National Library. The thesaurus is constructed in a rule-based way, using Linked Data principles in a SPARQL processor. It serves the automated extraction of metadata from scholarly publications by means of a computational-linguistic extractor that processes digital full texts. The extractor identifies subject headings by comparing character strings against the labels in the thesaurus, ranks the hits by their relevance in the text, and returns the assigned subject groups in ranked order. The underlying assumption is that the sought subject group is returned among the top ranks. The performance of the method is validated in a three-stage procedure. First, a gold standard is created from documents retrievable in the DNB online catalogue, based on metadata and the findings of a brief inspection. The documents are distributed over 14 of the subject groups with a lot size of 50 documents each. All documents are indexed with the extractor and the categorization results are documented. Finally, the resulting retrieval performance is assessed both for a hard (binary) categorization and for a ranked return of the subject groups.
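    The extractor described above is, in essence, string matching against thesaurus labels followed by relevance ranking. A minimal sketch of that idea, with invented data and names (thesaurus, assign_subject_groups are illustrative only; the thesis's actual SPARQL-built thesaurus and extractor are not reproduced here):
      from collections import Counter

      # Hypothetical mini-thesaurus: preferred label -> DNB DDC subject group.
      thesaurus = {
          "thesaurus": "020 Bibliotheks- und Informationswissenschaft",
          "klassifikation": "020 Bibliotheks- und Informationswissenschaft",
          "linguistik": "400 Sprache",
      }

      def assign_subject_groups(fulltext, top_n=3):
          """Match thesaurus labels by string comparison, rank them by frequency
          in the full text, and return the assigned subject groups in ranked order."""
          tokens = fulltext.lower().split()
          hits = Counter()
          for label, group in thesaurus.items():
              freq = tokens.count(label)
              if freq:
                  hits[group] += freq
          return [group for group, _ in hits.most_common(top_n)]

      print(assign_subject_groups("Der Thesaurus und die Klassifikation ordnen Begriffe, die Linguistik liefert Benennungen"))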
    Content
    Master thesis, Master of Science (Library and Information Studies) (MSc), Universität Wien. Advisor: Christoph Steiner. See: https://www.researchgate.net/publication/371680244_Vergabe_von_DDC-Sachgruppen_mittels_eines_Schlagwort-Thesaurus. DOI: 10.25365/thesis.70030. See also the presentation at: https://wiki.dnb.de/download/attachments/252121510/DA3%20Workshop-Gabler.pdf?version=1&modificationDate=1671093170000&api=v2.
    Imprint
    Wien : Universität / Library and Information Studies
  7. Farazi, M.: Faceted lightweight ontologies : a formalization and some experiments (2010) 0.24
    0.23862138 = product of:
      0.5454203 = sum of:
        0.039568312 = product of:
          0.11870494 = sum of:
            0.11870494 = weight(_text_:3a in 4997) [ClassicSimilarity], result of:
              0.11870494 = score(doc=4997,freq=2.0), product of:
                0.25345436 = queryWeight, product of:
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.029895496 = queryNorm
                0.46834838 = fieldWeight in 4997, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.0390625 = fieldNorm(doc=4997)
          0.33333334 = coord(1/3)
        0.11870494 = weight(_text_:2f in 4997) [ClassicSimilarity], result of:
          0.11870494 = score(doc=4997,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 4997, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=4997)
        0.11870494 = weight(_text_:2f in 4997) [ClassicSimilarity], result of:
          0.11870494 = score(doc=4997,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 4997, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=4997)
        0.11870494 = weight(_text_:2f in 4997) [ClassicSimilarity], result of:
          0.11870494 = score(doc=4997,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 4997, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=4997)
        0.11870494 = weight(_text_:2f in 4997) [ClassicSimilarity], result of:
          0.11870494 = score(doc=4997,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 4997, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=4997)
        0.0071975808 = weight(_text_:information in 4997) [ClassicSimilarity], result of:
          0.0071975808 = score(doc=4997,freq=4.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.13714671 = fieldWeight in 4997, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.0390625 = fieldNorm(doc=4997)
        0.023834616 = product of:
          0.047669232 = sum of:
            0.047669232 = weight(_text_:engineering in 4997) [ClassicSimilarity], result of:
              0.047669232 = score(doc=4997,freq=2.0), product of:
                0.16061439 = queryWeight, product of:
                  5.372528 = idf(docFreq=557, maxDocs=44218)
                  0.029895496 = queryNorm
                0.29679304 = fieldWeight in 4997, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  5.372528 = idf(docFreq=557, maxDocs=44218)
                  0.0390625 = fieldNorm(doc=4997)
          0.5 = coord(1/2)
      0.4375 = coord(7/16)
    
    Content
    PhD Dissertation at International Doctorate School in Information and Communication Technology. See: https://core.ac.uk/download/pdf/150083013.pdf.
    Imprint
    Trento : University / Department of information engineering and computer science
  8. Donsbach, W.: Wahrheit in den Medien : über den Sinn eines methodischen Objektivitätsbegriffes (2001) 0.24
    0.23746859 = product of:
      0.54278535 = sum of:
        0.039568312 = product of:
          0.11870494 = sum of:
            0.11870494 = weight(_text_:3a in 5895) [ClassicSimilarity], result of:
              0.11870494 = score(doc=5895,freq=2.0), product of:
                0.25345436 = queryWeight, product of:
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.029895496 = queryNorm
                0.46834838 = fieldWeight in 5895, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.0390625 = fieldNorm(doc=5895)
          0.33333334 = coord(1/3)
        0.11870494 = weight(_text_:2f in 5895) [ClassicSimilarity], result of:
          0.11870494 = score(doc=5895,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 5895, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=5895)
        0.11870494 = weight(_text_:2f in 5895) [ClassicSimilarity], result of:
          0.11870494 = score(doc=5895,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 5895, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=5895)
        0.02330781 = weight(_text_:der in 5895) [ClassicSimilarity], result of:
          0.02330781 = score(doc=5895,freq=16.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.34902605 = fieldWeight in 5895, product of:
              4.0 = tf(freq=16.0), with freq of:
                16.0 = termFreq=16.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.0390625 = fieldNorm(doc=5895)
        0.11870494 = weight(_text_:2f in 5895) [ClassicSimilarity], result of:
          0.11870494 = score(doc=5895,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 5895, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=5895)
        0.11870494 = weight(_text_:2f in 5895) [ClassicSimilarity], result of:
          0.11870494 = score(doc=5895,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 5895, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=5895)
        0.005089458 = weight(_text_:information in 5895) [ClassicSimilarity], result of:
          0.005089458 = score(doc=5895,freq=2.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.09697737 = fieldWeight in 5895, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.0390625 = fieldNorm(doc=5895)
      0.4375 = coord(7/16)
    
    Abstract
    The problem of how the media perceive and present truth leads to four central questions: How much truth is there in the world that journalists have to report on? How does one determine or research this truth? How does one separate the wheat from the chaff? And how does one, as a journalist, deal with what one has recognized, or believes to have recognized, as truth? There is an obvious parallel here between journalists and scientists. Both need, first, hypotheses; second, suitable tests of those hypotheses; third, a good criterion of demarcation; and fourth, procedures for representing, that is, presenting, the facts they have established in a way suited to communicating them to others. There are two major differences between journalists and scientists: journalists generally aim at statements limited in space and time, scientists generally at laws unlimited in space and time. But these differences are fluid, because scientists need spatio-temporally limited statements to test their universal statements, and journalists increasingly venture into the field of general law-like statements, or at least offer causal interpretations of social phenomena. The second difference is that science is largely professionalized (at least this holds without qualification for the natural sciences and medicine), which has given it relatively clear criteria of demarcation and quality. These are largely lacking in journalism.
    Content
    The contribution is based on a lecture given at the 9th Ethics Day "Wissenschaft und Medien" at the Center for Ethics and Law in Medicine of the University Medical Center Freiburg in February 2001.
    Source
    Politische Meinung. 381(2001) Nr.1, S.65-74 [https://www.dgfe.de/fileadmin/OrdnerRedakteure/Sektionen/Sek02_AEW/KWF/Publikationen_Reihe_1989-2003/Band_17/Bd_17_1994_355-406_A.pdf]
    Theme
    Information
  9. Hotho, A.; Bloehdorn, S.: Data Mining 2004 : Text classification by boosting weak learners based on terms and concepts (2004) 0.24
    0.23603138 = product of:
      0.629417 = sum of:
        0.047481976 = product of:
          0.14244592 = sum of:
            0.14244592 = weight(_text_:3a in 562) [ClassicSimilarity], result of:
              0.14244592 = score(doc=562,freq=2.0), product of:
                0.25345436 = queryWeight, product of:
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.029895496 = queryNorm
                0.56201804 = fieldWeight in 562, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.046875 = fieldNorm(doc=562)
          0.33333334 = coord(1/3)
        0.14244592 = weight(_text_:2f in 562) [ClassicSimilarity], result of:
          0.14244592 = score(doc=562,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 562, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=562)
        0.14244592 = weight(_text_:2f in 562) [ClassicSimilarity], result of:
          0.14244592 = score(doc=562,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 562, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=562)
        0.14244592 = weight(_text_:2f in 562) [ClassicSimilarity], result of:
          0.14244592 = score(doc=562,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 562, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=562)
        0.14244592 = weight(_text_:2f in 562) [ClassicSimilarity], result of:
          0.14244592 = score(doc=562,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 562, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=562)
        0.01215128 = product of:
          0.02430256 = sum of:
            0.02430256 = weight(_text_:22 in 562) [ClassicSimilarity], result of:
              0.02430256 = score(doc=562,freq=2.0), product of:
                0.104688935 = queryWeight, product of:
                  3.5018296 = idf(docFreq=3622, maxDocs=44218)
                  0.029895496 = queryNorm
                0.23214069 = fieldWeight in 562, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  3.5018296 = idf(docFreq=3622, maxDocs=44218)
                  0.046875 = fieldNorm(doc=562)
          0.5 = coord(1/2)
      0.375 = coord(6/16)
    
    Content
    See: http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.91.4940&rep=rep1&type=pdf.
    Date
    8. 1.2013 10:22:32
  10. Rieger, W.: SGML für die Praxis : Ansatz und Einsatz von ISO 8879; mit einer Einführung in HTML (1995) 0.24
    0.2351029 = product of:
      0.5373781 = sum of:
        0.06879586 = weight(_text_:informatik in 1640) [ClassicSimilarity], result of:
          0.06879586 = score(doc=1640,freq=8.0), product of:
            0.15254098 = queryWeight, product of:
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.029895496 = queryNorm
            0.4509992 = fieldWeight in 1640, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.03125 = fieldNorm(doc=1640)
        0.11743293 = weight(_text_:monographien in 1640) [ClassicSimilarity], result of:
          0.11743293 = score(doc=1640,freq=8.0), product of:
            0.1992968 = queryWeight, product of:
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.029895496 = queryNorm
            0.5892364 = fieldWeight in 1640, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.03125 = fieldNorm(doc=1640)
        0.12945499 = weight(_text_:einzelne in 1640) [ClassicSimilarity], result of:
          0.12945499 = score(doc=1640,freq=16.0), product of:
            0.17595729 = queryWeight, product of:
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.029895496 = queryNorm
            0.73571825 = fieldWeight in 1640, product of:
              4.0 = tf(freq=16.0), with freq of:
                16.0 = termFreq=16.0
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.03125 = fieldNorm(doc=1640)
        0.09209222 = weight(_text_:anwendungen in 1640) [ClassicSimilarity], result of:
          0.09209222 = score(doc=1640,freq=10.0), product of:
            0.16691269 = queryWeight, product of:
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.029895496 = queryNorm
            0.5517389 = fieldWeight in 1640, product of:
              3.1622777 = tf(freq=10.0), with freq of:
                10.0 = termFreq=10.0
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.03125 = fieldNorm(doc=1640)
        0.019777333 = weight(_text_:der in 1640) [ClassicSimilarity], result of:
          0.019777333 = score(doc=1640,freq=18.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.29615843 = fieldWeight in 1640, product of:
              4.2426405 = tf(freq=18.0), with freq of:
                18.0 = termFreq=18.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.03125 = fieldNorm(doc=1640)
        0.10575315 = weight(_text_:datenverarbeitung in 1640) [ClassicSimilarity], result of:
          0.10575315 = score(doc=1640,freq=8.0), product of:
            0.18912636 = queryWeight, product of:
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.029895496 = queryNorm
            0.55916667 = fieldWeight in 1640, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.03125 = fieldNorm(doc=1640)
        0.0040715667 = weight(_text_:information in 1640) [ClassicSimilarity], result of:
          0.0040715667 = score(doc=1640,freq=2.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.0775819 = fieldWeight in 1640, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.03125 = fieldNorm(doc=1640)
      0.4375 = coord(7/16)
    
    Abstract
    A fundamental problem of computer-supported creation, editing, and archiving of documents is that today's common document formats and page description languages reproduce content and structure inadequately. The Standard Generalized Markup Language solves this problem through the precise and flexible description of document structure. SGML thereby enables the versatile use of the information contained in documents for electronic publications, hypertext systems, and online documents, but also for the classical forms of publication in books, journals, and loose-leaf works. In addition, the ISO standard SGML serves as an exchange format in heterogeneous IT environments. The book conveys both the information decision-makers need and the basic knowledge developers need. It offers a practice-oriented introduction to the advantages and application areas of SGML, uses many examples to cover the various components of document type definitions, and gives an overview of the available SGML software. For users of SGML and developers of SGML applications, IT professionals in publishing houses, technical documentation specialists, and anyone generally interested in document processing.
    Classification
    ST 351 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Textverarbeitung, Desktop Publishing / Einzelne Programme (A-Z)
    ST 351 G47 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Textverarbeitung, Desktop Publishing / Einzelne Programme (A-Z) / SGML
    RVK
    ST 351 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Textverarbeitung, Desktop Publishing / Einzelne Programme (A-Z)
    ST 351 G47 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Textverarbeitung, Desktop Publishing / Einzelne Programme (A-Z) / SGML
  11. Mas, S.; Marleau, Y.: Proposition of a faceted classification model to support corporate information organization and digital records management (2009) 0.23
    0.23471354 = product of:
      0.6259028 = sum of:
        0.047481976 = product of:
          0.14244592 = sum of:
            0.14244592 = weight(_text_:3a in 2918) [ClassicSimilarity], result of:
              0.14244592 = score(doc=2918,freq=2.0), product of:
                0.25345436 = queryWeight, product of:
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.029895496 = queryNorm
                0.56201804 = fieldWeight in 2918, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.046875 = fieldNorm(doc=2918)
          0.33333334 = coord(1/3)
        0.14244592 = weight(_text_:2f in 2918) [ClassicSimilarity], result of:
          0.14244592 = score(doc=2918,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 2918, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=2918)
        0.14244592 = weight(_text_:2f in 2918) [ClassicSimilarity], result of:
          0.14244592 = score(doc=2918,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 2918, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=2918)
        0.14244592 = weight(_text_:2f in 2918) [ClassicSimilarity], result of:
          0.14244592 = score(doc=2918,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 2918, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=2918)
        0.14244592 = weight(_text_:2f in 2918) [ClassicSimilarity], result of:
          0.14244592 = score(doc=2918,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 2918, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=2918)
        0.008637097 = weight(_text_:information in 2918) [ClassicSimilarity], result of:
          0.008637097 = score(doc=2918,freq=4.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.16457605 = fieldWeight in 2918, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.046875 = fieldNorm(doc=2918)
      0.375 = coord(6/16)
    
    Abstract
    The employees of an organization often use a personal hierarchical classification scheme to organize digital documents that are stored on their own workstations. As this may make it hard for other employees to retrieve these documents, there is a risk that the organization will lose track of needed documentation. Furthermore, the inherent boundaries of such a hierarchical structure require making arbitrary decisions about which specific criteria the classification will be based on (for instance, the administrative activity or the document type, although a document can have several attributes and require classification in several classes). A faceted classification model to support corporate information organization is proposed. Partially based on Ranganathan's facet theory, this model aims not only to standardize the organization of digital documents, but also to simplify the management of a document throughout its life cycle for both individuals and organizations, while ensuring compliance with regulatory and policy requirements.
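    A small illustrative sketch of the contrast drawn above, with invented facet names (not taken from the authors' model): instead of a single folder path, each record carries several orthogonal facets that can be filtered independently:
      from dataclasses import dataclass, field

      @dataclass
      class Record:
          title: str
          facets: dict = field(default_factory=dict)  # orthogonal attributes instead of one folder path

      docs = [
          Record("2009 budget report", {"activity": "finance", "doc_type": "report", "year": 2009}),
          Record("HR policy update", {"activity": "human resources", "doc_type": "policy", "year": 2009}),
      ]

      def filter_by(records, **criteria):
          """Return the records whose facets match every given criterion."""
          return [r for r in records if all(r.facets.get(k) == v for k, v in criteria.items())]

      print([r.title for r in filter_by(docs, doc_type="report")])  # select by document type
      print([r.title for r in filter_by(docs, year=2009)])          # or by year, without re-filing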
    Footnote
    Cf.: http://ieeexplore.ieee.org/Xplore/login.jsp?reload=true&url=http%3A%2F%2Fieeexplore.ieee.org%2Fiel5%2F4755313%2F4755314%2F04755480.pdf%3Farnumber%3D4755480&authDecision=-203.
  12. Zeng, Q.; Yu, M.; Yu, W.; Xiong, J.; Shi, Y.; Jiang, M.: Faceted hierarchy : a new graph type to organize scientific concepts and a construction method (2019) 0.23
    0.23376489 = product of:
      0.62337303 = sum of:
        0.047481976 = product of:
          0.14244592 = sum of:
            0.14244592 = weight(_text_:3a in 400) [ClassicSimilarity], result of:
              0.14244592 = score(doc=400,freq=2.0), product of:
                0.25345436 = queryWeight, product of:
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.029895496 = queryNorm
                0.56201804 = fieldWeight in 400, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.046875 = fieldNorm(doc=400)
          0.33333334 = coord(1/3)
        0.14244592 = weight(_text_:2f in 400) [ClassicSimilarity], result of:
          0.14244592 = score(doc=400,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 400, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=400)
        0.14244592 = weight(_text_:2f in 400) [ClassicSimilarity], result of:
          0.14244592 = score(doc=400,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 400, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=400)
        0.14244592 = weight(_text_:2f in 400) [ClassicSimilarity], result of:
          0.14244592 = score(doc=400,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 400, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=400)
        0.14244592 = weight(_text_:2f in 400) [ClassicSimilarity], result of:
          0.14244592 = score(doc=400,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 400, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=400)
        0.00610735 = weight(_text_:information in 400) [ClassicSimilarity], result of:
          0.00610735 = score(doc=400,freq=2.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.116372846 = fieldWeight in 400, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.046875 = fieldNorm(doc=400)
      0.375 = coord(6/16)
    
    Abstract
    In a scientific concept hierarchy, a parent concept may have a few attributes, each of which has multiple values forming a group of child concepts. We call these attributes facets: classification, for instance, has facets such as application (e.g., face recognition), model (e.g., svm, knn), and metric (e.g., precision). In this work, we aim at building faceted concept hierarchies from scientific literature. Hierarchy construction methods rely heavily on hypernym detection; the faceted relations, however, are direct parent-to-child links, whereas the hypernym relation is a multi-hop, i.e., ancestor-to-descendant, link with the specific facet "type-of". We use information extraction techniques to find synonyms, sibling concepts, and ancestor-descendant relations in a data science corpus, and we propose a hierarchy growth algorithm that infers the parent-child links from these three types of relationships. It resolves conflicts by maintaining the acyclic structure of the hierarchy.
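    The acyclicity constraint mentioned above can be pictured with a small, hedged sketch (this is not the authors' algorithm): candidate parent-child links, ordered by some confidence score, are accepted greedily and rejected whenever they would close a cycle. The scores, concept names, and the greedy strategy are illustrative assumptions.

def would_create_cycle(children, parent, child):
    """True if adding parent -> child would make parent reachable from child."""
    stack, seen = [child], set()
    while stack:
        node = stack.pop()
        if node == parent:
            return True
        if node not in seen:
            seen.add(node)
            stack.extend(children.get(node, ()))
    return False

def grow_hierarchy(candidate_links):
    """Greedily accept (score, parent, child) links while keeping the graph acyclic."""
    children = {}
    for _, parent, child in sorted(candidate_links, reverse=True):
        if parent != child and not would_create_cycle(children, parent, child):
            children.setdefault(parent, set()).add(child)
    return children

# Toy candidates, e.g. inferred from synonym, sibling, and ancestor-descendant evidence.
links = [(0.9, "classification", "svm"),
         (0.8, "classification", "knn"),
         (0.3, "svm", "classification")]   # would close a cycle, so it is rejected
print(grow_hierarchy(links))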
    Content
    Cf.: https%3A%2F%2Faclanthology.org%2FD19-5317.pdf&usg=AOvVaw0ZZFyq5wWTtNTvNkrvjlGA.
  13. Malsburg, C. von der: ¬The correlation theory of brain function (1981) 0.23
    0.23087665 = product of:
      0.52771807 = sum of:
        0.039568312 = product of:
          0.11870494 = sum of:
            0.11870494 = weight(_text_:3a in 76) [ClassicSimilarity], result of:
              0.11870494 = score(doc=76,freq=2.0), product of:
                0.25345436 = queryWeight, product of:
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.029895496 = queryNorm
                0.46834838 = fieldWeight in 76, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.0390625 = fieldNorm(doc=76)
          0.33333334 = coord(1/3)
        0.11870494 = weight(_text_:2f in 76) [ClassicSimilarity], result of:
          0.11870494 = score(doc=76,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 76, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=76)
        0.11870494 = weight(_text_:2f in 76) [ClassicSimilarity], result of:
          0.11870494 = score(doc=76,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 76, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=76)
        0.008240555 = weight(_text_:der in 76) [ClassicSimilarity], result of:
          0.008240555 = score(doc=76,freq=2.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.12339935 = fieldWeight in 76, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.0390625 = fieldNorm(doc=76)
        0.11870494 = weight(_text_:2f in 76) [ClassicSimilarity], result of:
          0.11870494 = score(doc=76,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 76, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=76)
        0.11870494 = weight(_text_:2f in 76) [ClassicSimilarity], result of:
          0.11870494 = score(doc=76,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.46834838 = fieldWeight in 76, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0390625 = fieldNorm(doc=76)
        0.005089458 = weight(_text_:information in 76) [ClassicSimilarity], result of:
          0.005089458 = score(doc=76,freq=2.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.09697737 = fieldWeight in 76, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.0390625 = fieldNorm(doc=76)
      0.4375 = coord(7/16)
    
    Source
    http%3A%2F%2Fcogprints.org%2F1380%2F1%2FvdM_correlation.pdf&usg=AOvVaw0g7DvZbQPb2U7dYb49b9v_
    Theme
    Information
  14. Barnsley, M.F.; Hurd, L.P.: Bildkompression mit Fraktalen (1996) 0.23
    0.2276268 = product of:
      0.52028984 = sum of:
        0.085994825 = weight(_text_:informatik in 1547) [ClassicSimilarity], result of:
          0.085994825 = score(doc=1547,freq=8.0), product of:
            0.15254098 = queryWeight, product of:
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.029895496 = queryNorm
            0.563749 = fieldWeight in 1547, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1547)
        0.14679115 = weight(_text_:monographien in 1547) [ClassicSimilarity], result of:
          0.14679115 = score(doc=1547,freq=8.0), product of:
            0.1992968 = queryWeight, product of:
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.029895496 = queryNorm
            0.73654544 = fieldWeight in 1547, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1547)
        0.08090937 = weight(_text_:einzelne in 1547) [ClassicSimilarity], result of:
          0.08090937 = score(doc=1547,freq=4.0), product of:
            0.17595729 = queryWeight, product of:
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.029895496 = queryNorm
            0.4598239 = fieldWeight in 1547, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1547)
        0.0728053 = weight(_text_:anwendungen in 1547) [ClassicSimilarity], result of:
          0.0728053 = score(doc=1547,freq=4.0), product of:
            0.16691269 = queryWeight, product of:
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.029895496 = queryNorm
            0.43618792 = fieldWeight in 1547, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1547)
        0.01648111 = weight(_text_:der in 1547) [ClassicSimilarity], result of:
          0.01648111 = score(doc=1547,freq=8.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.2467987 = fieldWeight in 1547, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1547)
        0.09347347 = weight(_text_:datenverarbeitung in 1547) [ClassicSimilarity], result of:
          0.09347347 = score(doc=1547,freq=4.0), product of:
            0.18912636 = queryWeight, product of:
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.029895496 = queryNorm
            0.4942382 = fieldWeight in 1547, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1547)
        0.023834616 = product of:
          0.047669232 = sum of:
            0.047669232 = weight(_text_:engineering in 1547) [ClassicSimilarity], result of:
              0.047669232 = score(doc=1547,freq=2.0), product of:
                0.16061439 = queryWeight, product of:
                  5.372528 = idf(docFreq=557, maxDocs=44218)
                  0.029895496 = queryNorm
                0.29679304 = fieldWeight in 1547, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  5.372528 = idf(docFreq=557, maxDocs=44218)
                  0.0390625 = fieldNorm(doc=1547)
          0.5 = coord(1/2)
      0.4375 = coord(7/16)
    
    Classification
    ST 330 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Bildverarbeitung und Mustererkennung
    ST 300 Informatik / Monographien / Künstliche Intelligenz / Allgemeines
    ZN 6050 Technik / Elektrotechnik, Elektronik, Nachrichtentechnik / Nachrichtentechnik; Telekommunikation / Mustererkennung; Bilderkennung; Bildverarbeitung (Anwendung in der Nachrichtentechnik)
    RVK
    ST 330 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Bildverarbeitung und Mustererkennung
    ST 300 Informatik / Monographien / Künstliche Intelligenz / Allgemeines
    ZN 6050 Technik / Elektrotechnik, Elektronik, Nachrichtentechnik / Nachrichtentechnik; Telekommunikation / Mustererkennung; Bilderkennung; Bildverarbeitung (Anwendung in der Nachrichtentechnik)
    Series
    Multimedia engineering
  15. Vetere, G.; Lenzerini, M.: Models for semantic interoperability in service-oriented architectures (2005) 0.23
    0.22504479 = product of:
      0.7201433 = sum of:
        0.05539564 = product of:
          0.16618691 = sum of:
            0.16618691 = weight(_text_:3a in 306) [ClassicSimilarity], result of:
              0.16618691 = score(doc=306,freq=2.0), product of:
                0.25345436 = queryWeight, product of:
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.029895496 = queryNorm
                0.65568775 = fieldWeight in 306, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.0546875 = fieldNorm(doc=306)
          0.33333334 = coord(1/3)
        0.16618691 = weight(_text_:2f in 306) [ClassicSimilarity], result of:
          0.16618691 = score(doc=306,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.65568775 = fieldWeight in 306, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0546875 = fieldNorm(doc=306)
        0.16618691 = weight(_text_:2f in 306) [ClassicSimilarity], result of:
          0.16618691 = score(doc=306,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.65568775 = fieldWeight in 306, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0546875 = fieldNorm(doc=306)
        0.16618691 = weight(_text_:2f in 306) [ClassicSimilarity], result of:
          0.16618691 = score(doc=306,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.65568775 = fieldWeight in 306, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0546875 = fieldNorm(doc=306)
        0.16618691 = weight(_text_:2f in 306) [ClassicSimilarity], result of:
          0.16618691 = score(doc=306,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.65568775 = fieldWeight in 306, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.0546875 = fieldNorm(doc=306)
      0.3125 = coord(5/16)
    
    Content
    Cf.: http://ieeexplore.ieee.org/xpl/login.jsp?tp=&arnumber=5386707&url=http%3A%2F%2Fieeexplore.ieee.org%2Fxpls%2Fabs_all.jsp%3Farnumber%3D5386707.
  16. Rojas, R.: Theorie der neuronalen Netze : eine systematische Einführung (1993) 0.22
    0.22101724 = product of:
      0.5893793 = sum of:
        0.16851476 = weight(_text_:informatik in 1590) [ClassicSimilarity], result of:
          0.16851476 = score(doc=1590,freq=48.0), product of:
            0.15254098 = queryWeight, product of:
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.029895496 = queryNorm
            1.104718 = fieldWeight in 1590, product of:
              6.928203 = tf(freq=48.0), with freq of:
                48.0 = termFreq=48.0
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.03125 = fieldNorm(doc=1590)
        0.2196969 = weight(_text_:monographien in 1590) [ClassicSimilarity], result of:
          0.2196969 = score(doc=1590,freq=28.0), product of:
            0.1992968 = queryWeight, product of:
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.029895496 = queryNorm
            1.1023604 = fieldWeight in 1590, product of:
              5.2915025 = tf(freq=28.0), with freq of:
                28.0 = termFreq=28.0
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.03125 = fieldNorm(doc=1590)
        0.016148124 = weight(_text_:der in 1590) [ClassicSimilarity], result of:
          0.016148124 = score(doc=1590,freq=12.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.24181235 = fieldWeight in 1590, product of:
              3.4641016 = tf(freq=12.0), with freq of:
                12.0 = termFreq=12.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.03125 = fieldNorm(doc=1590)
        0.07477878 = weight(_text_:datenverarbeitung in 1590) [ClassicSimilarity], result of:
          0.07477878 = score(doc=1590,freq=4.0), product of:
            0.18912636 = queryWeight, product of:
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.029895496 = queryNorm
            0.39539057 = fieldWeight in 1590, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.03125 = fieldNorm(doc=1590)
        0.10616919 = weight(_text_:wirtschaftsinformatik in 1590) [ClassicSimilarity], result of:
          0.10616919 = score(doc=1590,freq=4.0), product of:
            0.22535236 = queryWeight, product of:
              7.538004 = idf(docFreq=63, maxDocs=44218)
              0.029895496 = queryNorm
            0.47112525 = fieldWeight in 1590, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              7.538004 = idf(docFreq=63, maxDocs=44218)
              0.03125 = fieldNorm(doc=1590)
        0.0040715667 = weight(_text_:information in 1590) [ClassicSimilarity], result of:
          0.0040715667 = score(doc=1590,freq=2.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.0775819 = fieldWeight in 1590, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.03125 = fieldNorm(doc=1590)
      0.375 = coord(6/16)
    
    Abstract
    This book brings together theoretical approaches and models that are scattered throughout the literature into a theory of artificial neural networks that spans the individual models. With a constant eye on biology it shows, starting from the simplest networks, how the properties of the models change when more general computational elements and network topologies are introduced. Each chapter contains examples, is extensively illustrated, and is rounded off with bibliographic notes. The book is aimed at readers who want to gain an overview or to deepen existing knowledge. It is suitable as a basis for neuroinformatics courses at German-speaking universities.
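    Purely as an illustrative aside (not material from the book), the step from the simplest networks to more general computational elements can be sketched as replacing a hard threshold unit by a differentiable one that can be stacked into richer topologies; the weights below are arbitrary.

import math

def threshold_unit(x, w, b):
    """Simplest computational element: a McCulloch-Pitts style threshold gate."""
    return 1 if sum(wi * xi for wi, xi in zip(w, x)) + b > 0 else 0

def sigmoid_unit(x, w, b):
    """More general element: a smooth activation usable in multi-layer topologies."""
    return 1.0 / (1.0 + math.exp(-(sum(wi * xi for wi, xi in zip(w, x)) + b)))

x = [1.0, 0.0]
print(threshold_unit(x, w=[1.0, 1.0], b=-1.5))  # behaves like a logical AND: 0
print(sigmoid_unit(x, w=[1.0, 1.0], b=-1.5))    # graded output instead of a hard 0/1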
    BK
    54.10 Theoretische Informatik
    Classification
    ST 152 Informatik / Monographien / Technische Informatik / Neurocomputer, Optische Computer u.a.
    ST 200 Informatik / Monographien / Vernetzung, verteilte Systeme / Allgemeines, Netzmanagement
    ST 300 Informatik / Monographien / Künstliche Intelligenz / Allgemeines
    QH 700 Wirtschaftswissenschaften / Mathematik. Statistik. Ökonometrie. Unternehmensforschung / Ökonometrie (einschließlich Logit-, Probit- und Tobitmodellen, d. h. Modellen mit qualitativen und begrenzt abhängigen Variablen [limited dependent variables]) / Datenverarbeitung. Wirtschaftsinformatik / Kybernetik. Neuronale Netze
    ST 285 Informatik / Monographien / Software und -entwicklung / Computer supported cooperative work (CSCW), Groupware
    ST 130 Informatik / Monographien / Grundlagen der Informatik / Theoretische Informatik / Allgemeines
    ST 150 Informatik / Monographien / Technische Informatik / Hardware, Rechnerarchitektur allgemein, von-Neumann-Architektur
    ST 301 Informatik / Monographien / Künstliche Intelligenz / Soft computing, Neuronale Netze, Fuzzy-Systeme
    54.10 Theoretische Informatik
    Footnote
    Rez. in: Knowledge organization 21(1994) no.1, S.42-43 (E. Oeser): "Therefore, over and beyond its intrinsic didactic function, this book can be recommended to all those seriously interested in the theoretical foundations of an information processing technology which has already succeeded in closing major gaps which conventional methods have so far been unable to fill"
    RVK
    ST 152 Informatik / Monographien / Technische Informatik / Neurocomputer, Optische Computer u.a.
    ST 200 Informatik / Monographien / Vernetzung, verteilte Systeme / Allgemeines, Netzmanagement
    ST 300 Informatik / Monographien / Künstliche Intelligenz / Allgemeines
    QH 700 Wirtschaftswissenschaften / Mathematik. Statistik. Ökonometrie. Unternehmensforschung / Ökonometrie (einschließlich Logit-, Probit- und Tobitmodellen, d. h. Modellen mit qualitativen und begrenzt abhängigen Variablen [limited dependent variables]) / Datenverarbeitung. Wirtschaftsinformatik / Kybernetik. Neuronale Netze
    ST 285 Informatik / Monographien / Software und -entwicklung / Computer supported cooperative work (CSCW), Groupware
    ST 130 Informatik / Monographien / Grundlagen der Informatik / Theoretische Informatik / Allgemeines
    ST 150 Informatik / Monographien / Technische Informatik / Hardware, Rechnerarchitektur allgemein, von-Neumann-Architektur
    ST 301 Informatik / Monographien / Künstliche Intelligenz / Soft computing, Neuronale Netze, Fuzzy-Systeme
  17. Huo, W.: Automatic multi-word term extraction and its application to Web-page summarization (2012) 0.22
    0.22051588 = product of:
      0.5880423 = sum of:
        0.14244592 = weight(_text_:2f in 563) [ClassicSimilarity], result of:
          0.14244592 = score(doc=563,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 563, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=563)
        0.14244592 = weight(_text_:2f in 563) [ClassicSimilarity], result of:
          0.14244592 = score(doc=563,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 563, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=563)
        0.14244592 = weight(_text_:2f in 563) [ClassicSimilarity], result of:
          0.14244592 = score(doc=563,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 563, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=563)
        0.14244592 = weight(_text_:2f in 563) [ClassicSimilarity], result of:
          0.14244592 = score(doc=563,freq=2.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.56201804 = fieldWeight in 563, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.046875 = fieldNorm(doc=563)
        0.00610735 = weight(_text_:information in 563) [ClassicSimilarity], result of:
          0.00610735 = score(doc=563,freq=2.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.116372846 = fieldWeight in 563, product of:
              1.4142135 = tf(freq=2.0), with freq of:
                2.0 = termFreq=2.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.046875 = fieldNorm(doc=563)
        0.01215128 = product of:
          0.02430256 = sum of:
            0.02430256 = weight(_text_:22 in 563) [ClassicSimilarity], result of:
              0.02430256 = score(doc=563,freq=2.0), product of:
                0.104688935 = queryWeight, product of:
                  3.5018296 = idf(docFreq=3622, maxDocs=44218)
                  0.029895496 = queryNorm
                0.23214069 = fieldWeight in 563, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  3.5018296 = idf(docFreq=3622, maxDocs=44218)
                  0.046875 = fieldNorm(doc=563)
          0.5 = coord(1/2)
      0.375 = coord(6/16)
    
    Abstract
    In this thesis we propose three new word association measures for multi-word term extraction. We combine these association measures with the LocalMaxs algorithm in our extraction model and compare the results of different multi-word term extraction methods. Our approach is language- and domain-independent and requires no training data. It can be applied to such tasks as text summarization, information retrieval, and document classification. We further explore the potential of using multi-word terms as an effective representation for general web-page summarization. We extract multi-word terms from human-written summaries in a large collection of web pages and generate the summaries by aligning document words with these multi-word terms. Our system applies machine translation technology to learn the alignment process from a training set and focuses on selecting high-quality multi-word terms from human-written summaries to generate suitable results for web-page summarization.
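    A hedged sketch of what a word-association measure over bigram counts looks like (plain pointwise mutual information, not one of the thesis's three measures); its scores could then feed a selection step such as LocalMaxs. The toy text and the measure itself are assumptions for illustration.

import math
from collections import Counter

def pmi(pair, unigrams, bigrams, n_tokens, n_bigrams):
    """Pointwise mutual information: how much more often the word pair co-occurs
    than the independent unigram frequencies would predict."""
    w1, w2 = pair
    p_xy = bigrams[pair] / n_bigrams
    p_x = unigrams[w1] / n_tokens
    p_y = unigrams[w2] / n_tokens
    return math.log2(p_xy / (p_x * p_y))

text = ("machine translation improves web page summarization and "
        "machine translation aligns document words with multi word terms")
tokens = text.split()
unigrams = Counter(tokens)
bigrams = Counter(zip(tokens, tokens[1:]))

ranked = sorted(bigrams,
                key=lambda b: pmi(b, unigrams, bigrams,
                                  len(tokens), sum(bigrams.values())),
                reverse=True)
print(ranked[:3])  # highest-association candidate multi-word terms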
    Content
    A thesis presented to The University of Guelph in partial fulfilment of the requirements for the degree of Master of Science in Computer Science. Cf.: http://www.inf.ufrgs.br%2F~ceramisch%2Fdownload_files%2Fpublications%2F2009%2Fp01.pdf.
    Date
    10. 1.2013 19:22:47
  18. Xiong, C.: Knowledge based text representations for information retrieval (2016) 0.22
    0.21763802 = product of:
      0.58036804 = sum of:
        0.031654652 = product of:
          0.09496395 = sum of:
            0.09496395 = weight(_text_:3a in 5820) [ClassicSimilarity], result of:
              0.09496395 = score(doc=5820,freq=2.0), product of:
                0.25345436 = queryWeight, product of:
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.029895496 = queryNorm
                0.3746787 = fieldWeight in 5820, product of:
                  1.4142135 = tf(freq=2.0), with freq of:
                    2.0 = termFreq=2.0
                  8.478011 = idf(docFreq=24, maxDocs=44218)
                  0.03125 = fieldNorm(doc=5820)
          0.33333334 = coord(1/3)
        0.13429931 = weight(_text_:2f in 5820) [ClassicSimilarity], result of:
          0.13429931 = score(doc=5820,freq=4.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.5298757 = fieldWeight in 5820, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.03125 = fieldNorm(doc=5820)
        0.13429931 = weight(_text_:2f in 5820) [ClassicSimilarity], result of:
          0.13429931 = score(doc=5820,freq=4.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.5298757 = fieldWeight in 5820, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.03125 = fieldNorm(doc=5820)
        0.13429931 = weight(_text_:2f in 5820) [ClassicSimilarity], result of:
          0.13429931 = score(doc=5820,freq=4.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.5298757 = fieldWeight in 5820, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.03125 = fieldNorm(doc=5820)
        0.13429931 = weight(_text_:2f in 5820) [ClassicSimilarity], result of:
          0.13429931 = score(doc=5820,freq=4.0), product of:
            0.25345436 = queryWeight, product of:
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.029895496 = queryNorm
            0.5298757 = fieldWeight in 5820, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              8.478011 = idf(docFreq=24, maxDocs=44218)
              0.03125 = fieldNorm(doc=5820)
        0.011516129 = weight(_text_:information in 5820) [ClassicSimilarity], result of:
          0.011516129 = score(doc=5820,freq=16.0), product of:
            0.052480884 = queryWeight, product of:
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.029895496 = queryNorm
            0.21943474 = fieldWeight in 5820, product of:
              4.0 = tf(freq=16.0), with freq of:
                16.0 = termFreq=16.0
              1.7554779 = idf(docFreq=20772, maxDocs=44218)
              0.03125 = fieldNorm(doc=5820)
      0.375 = coord(6/16)
    
    Abstract
    The successes of information retrieval (IR) in recent decades were built upon bag-of-words representations. Effective as it is, bag-of-words provides only a shallow understanding of text; there is a limited amount of information available for document ranking in the word space. This dissertation goes beyond words and builds knowledge-based text representations, which embed external, carefully curated information from knowledge bases and provide richer, structured evidence for more advanced information retrieval systems. This thesis research first builds query representations with entities associated with the query. Entities' descriptions are used by query expansion techniques that enrich the query with explanation terms. Then we present a general framework that represents a query with entities that appear in the query, are retrieved by the query, or frequently show up in the top retrieved documents. A latent space model is developed to jointly learn the connections from query to entities and the ranking of documents, modeling the external evidence from knowledge bases and the internal ranking features cooperatively. To further improve the quality of relevant entities, a defining factor of our query representations, we introduce learning to rank to entity search and retrieve better entities from knowledge bases. In the document representation part, this thesis research also moves one step forward with a bag-of-entities model, in which documents are represented by their automatic entity annotations and ranking is performed in the entity space.
    This proposal includes plans to improve the quality of relevant entities with a co-learning framework that learns from both entity labels and document labels. We also plan to develop a hybrid ranking system that combines word-based and entity-based representations, with their uncertainties taken into account. Finally, we plan to enrich the text representations with connections between entities. We propose several ways to infer entity graph representations for texts and to rank documents using these structured representations. This dissertation overcomes the limitations of word-based representations with external, carefully curated information from knowledge bases. We believe this thesis research is a solid start towards a new generation of intelligent, semantic, and structured information retrieval.
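    A hedged toy version of the bag-of-entities idea described above (not the dissertation's system): query and documents are represented by their entity annotations, and ranking is a simple overlap count in the entity space. The entity IDs and the scoring function are invented for the example.

from collections import Counter

def bag_of_entities(annotations):
    """Represent a text by the multiset of entity IDs linked to it."""
    return Counter(annotations)

def entity_overlap(query_entities, doc_entities):
    """Score a document by how strongly its entity annotations overlap the query's."""
    return sum(min(count, doc_entities[e]) for e, count in query_entities.items())

# Hypothetical output of an entity linker run over the query and two documents.
query = bag_of_entities(["E:Information_retrieval", "E:Knowledge_base"])
docs = {
    "d1": bag_of_entities(["E:Information_retrieval", "E:Bag_of_words_model"]),
    "d2": bag_of_entities(["E:Knowledge_base", "E:Information_retrieval",
                           "E:Entity_linking"]),
}

ranking = sorted(docs, key=lambda d: entity_overlap(query, docs[d]), reverse=True)
print(ranking)  # d2 ranks above d1 because it shares more query entities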
    Content
    Submitted in partial fulfillment of the requirements for the degree of Doctor of Philosophy in Language and Information Technologies. Cf.: https%3A%2F%2Fwww.cs.cmu.edu%2F~cx%2Fpapers%2Fknowledge_based_text_representation.pdf&usg=AOvVaw0SaTSvhWLTh__Uz_HtOtl3.
  19. Datenanalyse, Klassifikation und Informationsverarbeitung : Methoden und Anwendungen in verschiedenen Fachgebieten (1992) 0.22
    0.21630391 = product of:
      0.49440897 = sum of:
        0.04864602 = weight(_text_:informatik in 1452) [ClassicSimilarity], result of:
          0.04864602 = score(doc=1452,freq=4.0), product of:
            0.15254098 = queryWeight, product of:
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.029895496 = queryNorm
            0.3189046 = fieldWeight in 1452, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.03125 = fieldNorm(doc=1452)
        0.08303762 = weight(_text_:monographien in 1452) [ClassicSimilarity], result of:
          0.08303762 = score(doc=1452,freq=4.0), product of:
            0.1992968 = queryWeight, product of:
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.029895496 = queryNorm
            0.41665307 = fieldWeight in 1452, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.03125 = fieldNorm(doc=1452)
        0.06472749 = weight(_text_:einzelne in 1452) [ClassicSimilarity], result of:
          0.06472749 = score(doc=1452,freq=4.0), product of:
            0.17595729 = queryWeight, product of:
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.029895496 = queryNorm
            0.36785913 = fieldWeight in 1452, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.03125 = fieldNorm(doc=1452)
        0.07133433 = weight(_text_:anwendungen in 1452) [ClassicSimilarity], result of:
          0.07133433 = score(doc=1452,freq=6.0), product of:
            0.16691269 = queryWeight, product of:
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.029895496 = queryNorm
            0.42737514 = fieldWeight in 1452, product of:
              2.4494898 = tf(freq=6.0), with freq of:
                6.0 = termFreq=6.0
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.03125 = fieldNorm(doc=1452)
        0.014741153 = weight(_text_:der in 1452) [ClassicSimilarity], result of:
          0.014741153 = score(doc=1452,freq=10.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.22074346 = fieldWeight in 1452, product of:
              3.1622777 = tf(freq=10.0), with freq of:
                10.0 = termFreq=10.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.03125 = fieldNorm(doc=1452)
        0.10575315 = weight(_text_:datenverarbeitung in 1452) [ClassicSimilarity], result of:
          0.10575315 = score(doc=1452,freq=8.0), product of:
            0.18912636 = queryWeight, product of:
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.029895496 = queryNorm
            0.55916667 = fieldWeight in 1452, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.03125 = fieldNorm(doc=1452)
        0.10616919 = weight(_text_:wirtschaftsinformatik in 1452) [ClassicSimilarity], result of:
          0.10616919 = score(doc=1452,freq=4.0), product of:
            0.22535236 = queryWeight, product of:
              7.538004 = idf(docFreq=63, maxDocs=44218)
              0.029895496 = queryNorm
            0.47112525 = fieldWeight in 1452, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              7.538004 = idf(docFreq=63, maxDocs=44218)
              0.03125 = fieldNorm(doc=1452)
      0.4375 = coord(7/16)
    
    Classification
    QH 500 Wirtschaftswissenschaften / Mathematik. Statistik. Ökonometrie. Unternehmensforschung / Ökonometrie (einschließlich Logit-, Probit- und Tobitmodellen, d. h. Modellen mit qualitativen und begrenzt abhängigen Variablen [limited dependent variables]) / Datenverarbeitung. Wirtschaftsinformatik
    ST 320 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Computergraphik
    Content
    Contains, among others: HAFNER, J.: Klassifikation aus wissenschaftstheoretischer Perspektive; SIMONS, P.: Philosophische Aspekte der Klassifikation; KAUFFER, M.: Sprachliche Klassifikation und Analyse von Nominalkomposita in Speisebezeichnungen anhand informatischer und statistischer Methoden; HAVEKOST, H.: Drehscheibe ISBN: Weg zum internationalen Fachwörterbuch; LORENZ, B.: Sacherschließung von Literatur durch Stichwortsuche im OPAC?; PREUSS, L.: TAXIS - ein elektronischer Bibliothekskatalog; VOLK, M., H. MITTERMAIER, A. SCHURIG u. T. BIEDASSEK: Halbautomatische Volltextanalyse, Datenbankaufbau und Document Retrieval
    Footnote
    Papers presented at the 15th annual conference of the Gesellschaft für Klassifikation, 25-27 Feb. 1991 in Salzburg
    RVK
    QH 500 Wirtschaftswissenschaften / Mathematik. Statistik. Ökonometrie. Unternehmensforschung / Ökonometrie (einschließlich Logit-, Probit- und Tobitmodellen, d. h. Modellen mit qualitativen und begrenzt abhängigen Variablen [limited dependent variables]) / Datenverarbeitung. Wirtschaftsinformatik
    ST 320 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Computergraphik
  20. Riehm, U.; Böhle, K.; Gabel-Becker, I.; Wingert, B.: Elektronisches Publizieren : eine kritische Bestandsaufnahme (1992) 0.21
    0.20598596 = product of:
      0.5492959 = sum of:
        0.105321735 = weight(_text_:informatik in 1585) [ClassicSimilarity], result of:
          0.105321735 = score(doc=1585,freq=12.0), product of:
            0.15254098 = queryWeight, product of:
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.029895496 = queryNorm
            0.69044876 = fieldWeight in 1585, product of:
              3.4641016 = tf(freq=12.0), with freq of:
                12.0 = termFreq=12.0
              5.1024737 = idf(docFreq=730, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1585)
        0.14679115 = weight(_text_:monographien in 1585) [ClassicSimilarity], result of:
          0.14679115 = score(doc=1585,freq=8.0), product of:
            0.1992968 = queryWeight, product of:
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.029895496 = queryNorm
            0.73654544 = fieldWeight in 1585, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              6.666449 = idf(docFreq=152, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1585)
        0.114423126 = weight(_text_:einzelne in 1585) [ClassicSimilarity], result of:
          0.114423126 = score(doc=1585,freq=8.0), product of:
            0.17595729 = queryWeight, product of:
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.029895496 = queryNorm
            0.6502892 = fieldWeight in 1585, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              5.885746 = idf(docFreq=333, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1585)
        0.0728053 = weight(_text_:anwendungen in 1585) [ClassicSimilarity], result of:
          0.0728053 = score(doc=1585,freq=4.0), product of:
            0.16691269 = queryWeight, product of:
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.029895496 = queryNorm
            0.43618792 = fieldWeight in 1585, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              5.583205 = idf(docFreq=451, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1585)
        0.01648111 = weight(_text_:der in 1585) [ClassicSimilarity], result of:
          0.01648111 = score(doc=1585,freq=8.0), product of:
            0.06677957 = queryWeight, product of:
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.029895496 = queryNorm
            0.2467987 = fieldWeight in 1585, product of:
              2.828427 = tf(freq=8.0), with freq of:
                8.0 = termFreq=8.0
              2.2337668 = idf(docFreq=12875, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1585)
        0.09347347 = weight(_text_:datenverarbeitung in 1585) [ClassicSimilarity], result of:
          0.09347347 = score(doc=1585,freq=4.0), product of:
            0.18912636 = queryWeight, product of:
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.029895496 = queryNorm
            0.4942382 = fieldWeight in 1585, product of:
              2.0 = tf(freq=4.0), with freq of:
                4.0 = termFreq=4.0
              6.326249 = idf(docFreq=214, maxDocs=44218)
              0.0390625 = fieldNorm(doc=1585)
      0.375 = coord(6/16)
    
    BK
    54.82 Textverarbeitung <Informatik>
    Classification
    ST 281 Informatik / Monographien / Software und -entwicklung / Einzelne Benutzerschnittstellen (alphabet.)
    AP 15840 Allgemeines / Medien- und Kommunikationswissenschaften, Kommunikationsdesign / Formen der Kommunikation und des Kommunikationsdesigns / Elektronisch unterstützte Formen
    ST 350 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Textverarbeitung, Desktop Publishing
    54.82 Textverarbeitung <Informatik>
    RVK
    ST 281 Informatik / Monographien / Software und -entwicklung / Einzelne Benutzerschnittstellen (alphabet.)
    AP 15840 Allgemeines / Medien- und Kommunikationswissenschaften, Kommunikationsdesign / Formen der Kommunikation und des Kommunikationsdesigns / Elektronisch unterstützte Formen
    ST 350 Informatik / Monographien / Einzelne Anwendungen der Datenverarbeitung / Textverarbeitung, Desktop Publishing
