Search results for: texture feature extraction
<!DOCTYPE html> <html lang="en" dir="ltr"> <head> <!-- Google tag (gtag.js) --> <script async src="https://www.googletagmanager.com/gtag/js?id=G-P63WKM1TM1"></script> <script> window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-P63WKM1TM1'); </script> <!-- Yandex.Metrika counter --> <script type="text/javascript" > (function(m,e,t,r,i,k,a){m[i]=m[i]||function(){(m[i].a=m[i].a||[]).push(arguments)}; m[i].l=1*new Date(); for (var j = 0; j < document.scripts.length; j++) {if (document.scripts[j].src === r) { return; }} k=e.createElement(t),a=e.getElementsByTagName(t)[0],k.async=1,k.src=r,a.parentNode.insertBefore(k,a)}) (window, document, "script", "https://mc.yandex.ru/metrika/tag.js", "ym"); ym(55165297, "init", { clickmap:false, trackLinks:true, accurateTrackBounce:true, webvisor:false }); </script> <noscript><div><img src="https://mc.yandex.ru/watch/55165297" style="position:absolute; left:-9999px;" alt="" /></div></noscript> <!-- /Yandex.Metrika counter --> <!-- Matomo --> <script> var _paq = window._paq = window._paq || []; /* tracker methods like "setCustomDimension" should be called before "trackPageView" */ _paq.push(['trackPageView']); _paq.push(['enableLinkTracking']); (function() { var u="//matomo.waset.org/"; _paq.push(['setTrackerUrl', u+'matomo.php']); _paq.push(['setSiteId', '2']); var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0]; g.async=true; g.src=u+'matomo.js'; s.parentNode.insertBefore(g,s); })(); </script> <!-- End Matomo Code --> <title>Search results for: texture feature extraction</title> <meta name="description" content="Search results for: texture feature extraction"> <meta name="keywords" content="texture feature extraction"> <meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1, maximum-scale=1, user-scalable=no"> <meta charset="utf-8"> <link href="https://cdn.waset.org/favicon.ico" type="image/x-icon" rel="shortcut icon"> <link href="https://cdn.waset.org/static/plugins/bootstrap-4.2.1/css/bootstrap.min.css" rel="stylesheet"> <link href="https://cdn.waset.org/static/plugins/fontawesome/css/all.min.css" rel="stylesheet"> <link href="https://cdn.waset.org/static/css/site.css?v=150220211555" rel="stylesheet"> </head> <body> <header> <div class="container"> <nav class="navbar navbar-expand-lg navbar-light"> <a class="navbar-brand" href="https://waset.org"> <img src="https://cdn.waset.org/static/images/wasetc.png" alt="Open Science Research Excellence" title="Open Science Research Excellence" /> </a> <button class="d-block d-lg-none navbar-toggler ml-auto" type="button" data-toggle="collapse" data-target="#navbarMenu" aria-controls="navbarMenu" aria-expanded="false" aria-label="Toggle navigation"> <span class="navbar-toggler-icon"></span> </button> <div class="w-100"> <div class="d-none d-lg-flex flex-row-reverse"> <form method="get" action="https://waset.org/search" class="form-inline my-2 my-lg-0"> <input class="form-control mr-sm-2" type="search" placeholder="Search Conferences" value="texture feature extraction" name="q" aria-label="Search"> <button class="btn btn-light my-2 my-sm-0" type="submit"><i class="fas fa-search"></i></button> </form> </div> <div class="collapse navbar-collapse mt-1" id="navbarMenu"> <ul class="navbar-nav ml-auto align-items-center" id="mainNavMenu"> <li class="nav-item"> <a class="nav-link" href="https://waset.org/conferences" title="Conferences in 2025/2026/2027">Conferences</a> </li> <li 
class="nav-item"> <a class="nav-link" href="https://waset.org/disciplines" title="Disciplines">Disciplines</a> </li> <li class="nav-item"> <a class="nav-link" href="https://waset.org/committees" rel="nofollow">Committees</a> </li> <li class="nav-item dropdown"> <a class="nav-link dropdown-toggle" href="#" id="navbarDropdownPublications" role="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false"> Publications </a> <div class="dropdown-menu" aria-labelledby="navbarDropdownPublications"> <a class="dropdown-item" href="https://publications.waset.org/abstracts">Abstracts</a> <a class="dropdown-item" href="https://publications.waset.org">Periodicals</a> <a class="dropdown-item" href="https://publications.waset.org/archive">Archive</a> </div> </li> <li class="nav-item"> <a class="nav-link" href="https://waset.org/page/support" title="Support">Support</a> </li> </ul> </div> </div> </nav> </div> </header> <main> <div class="container mt-4"> <div class="row"> <div class="col-md-9 mx-auto"> <form method="get" action="https://publications.waset.org/search"> <div id="custom-search-input"> <div class="input-group"> <i class="fas fa-search"></i> <input type="text" class="search-query" name="q" placeholder="Author, Title, Abstract, Keywords" value="texture feature extraction"> <input type="submit" class="btn_search" value="Search"> </div> </div> </form> </div> </div> <div class="row mt-3"> <div class="col-sm-3"> <div class="card"> <div class="card-body"><strong>Commenced</strong> in January 2007</div> </div> </div> <div class="col-sm-3"> <div class="card"> <div class="card-body"><strong>Frequency:</strong> Monthly</div> </div> </div> <div class="col-sm-3"> <div class="card"> <div class="card-body"><strong>Edition:</strong> International</div> </div> </div> <div class="col-sm-3"> <div class="card"> <div class="card-body"><strong>Paper Count:</strong> 1621</div> </div> </div> </div> <h1 class="mt-3 mb-3 text-center" style="font-size:1.6rem;">Search results for: texture feature extraction</h1> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1621</span> Enhancing capabilities of Texture Extraction for Color Image Retrieval</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Pranam%20Janney">Pranam Janney</a>, <a href="https://publications.waset.org/search?q=Sridhar%20G"> Sridhar G</a>, <a href="https://publications.waset.org/search?q=Sridhar%20V."> Sridhar V.</a> </p> <p class="card-text"><strong>Abstract:</strong></p> Content-Based Image Retrieval has been a major area of research in recent years. Efficient image retrieval with high precision would require an approach which combines usage of both the color and texture features of the image. In this paper we propose a method for enhancing the capabilities of texture based feature extraction and further demonstrate the use of these enhanced texture features in Texture-Based Color Image Retrieval. 
<p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Image%20retrieval" title="Image retrieval">Image retrieval</a>, <a href="https://publications.waset.org/search?q=texture%20feature%20extraction" title=" texture feature extraction"> texture feature extraction</a>, <a href="https://publications.waset.org/search?q=color%0Aextraction" title=" color extraction"> color extraction</a> </p> <a href="https://publications.waset.org/12008/enhancing-capabilities-of-texture-extraction-for-color-image-retrieval" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/12008/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/12008/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/12008/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/12008/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/12008/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/12008/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/12008/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/12008/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/12008/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/12008/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/12008.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1631</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1620</span> Texture Feature Extraction using Slant-Hadamard Transform</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=M.%20J.%20Nassiri">M. J. Nassiri</a>, <a href="https://publications.waset.org/search?q=A.%20Vafaei"> A. Vafaei</a>, <a href="https://publications.waset.org/search?q=A.%20Monadjemi"> A. Monadjemi</a> </p> <p class="card-text"><strong>Abstract:</strong></p> Random and natural textures classification is still one of the biggest challenges in the field of image processing and pattern recognition. In this paper, texture feature extraction using Slant Hadamard Transform was studied and compared to other signal processing-based texture classification schemes. A parametric SHT was also introduced and employed for natural textures feature extraction. We showed that a subtly modified parametric SHT can outperform ordinary Walsh-Hadamard transform and discrete cosine transform. Experiments were carried out on a subset of Vistex random natural texture images using a kNN classifier. 
<p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Texture%20Analysis" title="Texture Analysis">Texture Analysis</a>, <a href="https://publications.waset.org/search?q=Slant%20Transform" title=" Slant Transform"> Slant Transform</a>, <a href="https://publications.waset.org/search?q=Hadamard" title=" Hadamard"> Hadamard</a>, <a href="https://publications.waset.org/search?q=DCT." title=" DCT."> DCT.</a> </p> <a href="https://publications.waset.org/11092/texture-feature-extraction-using-slant-hadamard-transform" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/11092/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/11092/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/11092/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/11092/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/11092/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/11092/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/11092/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/11092/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/11092/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/11092/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/11092.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">2685</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1619</span> Automatic Moment-Based Texture Segmentation</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Tudor%20Barbu">Tudor Barbu</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>An automatic moment-based texture segmentation approach is proposed in this paper. First, we describe the related work in this computer vision domain. Our texture feature extraction, the first part of the texture recognition process, produces a set of moment-based feature vectors. For each image pixel, a texture feature vector is computed as a sequence of area moments. Then, an automatic pixel classification approach is proposed. The feature vectors are clustered using an unsupervised classification algorithm, the optimal number of clusters being determined using a measure based on validation indexes. 
From the resulted pixel classes one determines easily the desired texture regions of the image.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Image%20segmentation" title="Image segmentation">Image segmentation</a>, <a href="https://publications.waset.org/search?q=moment-based%20texture%20analysis" title=" moment-based texture analysis"> moment-based texture analysis</a>, <a href="https://publications.waset.org/search?q=automatic%20classification" title=" automatic classification"> automatic classification</a>, <a href="https://publications.waset.org/search?q=validity%20indexes." title=" validity indexes."> validity indexes.</a> </p> <a href="https://publications.waset.org/9996875/automatic-moment-based-texture-segmentation" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/9996875/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/9996875/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/9996875/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/9996875/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/9996875/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/9996875/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/9996875/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/9996875/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/9996875/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/9996875/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/9996875.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">2388</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1618</span> Statistical Feature Extraction Method for Wood Species Recognition System </h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Mohd%20Iz%27aan%20Paiz%20Bin%20Zamri">Mohd Iz'aan Paiz Bin Zamri</a>, <a href="https://publications.waset.org/search?q=Anis%20Salwa%20Mohd%20Khairuddin"> Anis Salwa Mohd Khairuddin</a>, <a href="https://publications.waset.org/search?q=Norrima%20Mokhtar"> Norrima Mokhtar</a>, <a href="https://publications.waset.org/search?q=Rubiyah%20Yusof"> Rubiyah Yusof</a> </p> <p class="card-text"><strong>Abstract:</strong></p> Effective statistical feature extraction and classification are important in image-based automatic inspection and analysis. An automatic wood species recognition system is designed to perform wood inspection at custom checkpoints to avoid mislabeling of timber which will results to loss of income to the timber industry. The system focuses on analyzing the statistical pores properties of the wood images. 
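A minimal sketch of the pipeline summarized above: each pixel receives a vector of local area moments, and the vectors are clustered without supervision. The paper selects the number of clusters with validity indexes; here k is fixed for brevity, and the window size, moment orders and names are illustrative assumptions rather than the authors' choices.

```python
# Per-pixel area-moment features followed by unsupervised clustering.
import numpy as np
from scipy.ndimage import correlate
from sklearn.cluster import KMeans

def moment_features(img, win=9, orders=((0, 0), (1, 0), (0, 1), (1, 1), (2, 0), (0, 2))):
    """Local area moments m_pq = sum_{u,v} u^p v^q I(x+u, y+v) over a win x win window."""
    r = win // 2
    u, v = np.meshgrid(np.arange(-r, r + 1), np.arange(-r, r + 1), indexing="ij")
    feats = [correlate(img.astype(float), (u ** p) * (v ** q)) for p, q in orders]
    return np.stack(feats, axis=-1)                      # H x W x len(orders)

def segment(img, k=3, win=9):
    """Cluster the per-pixel moment vectors; returns one texture label per pixel."""
    F = moment_features(img, win)
    flat = F.reshape(-1, F.shape[-1])
    labels = KMeans(n_clusters=k, n_init=10).fit_predict(flat)
    return labels.reshape(img.shape)
```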
1618. Statistical Feature Extraction Method for Wood Species Recognition System
Authors: Mohd Iz'aan Paiz Bin Zamri, Anis Salwa Mohd Khairuddin, Norrima Mokhtar, Rubiyah Yusof
Abstract: Effective statistical feature extraction and classification are important in image-based automatic inspection and analysis. An automatic wood species recognition system is designed to perform wood inspection at customs checkpoints to avoid mislabeling of timber, which would result in a loss of income to the timber industry. The system focuses on analyzing the statistical pore properties of the wood images. This paper proposes a fuzzy-based feature extractor which mimics the experts' knowledge of wood texture to extract the properties of pore distribution from the wood surface texture. The proposed feature extractor consists of two steps, namely pore extraction and fuzzy pore management. A total of 38 statistical features are extracted from each wood image. Then, a backpropagation neural network is used to classify the wood species based on the statistical features. A comprehensive set of experiments on a database composed of 5200 macroscopic images from 52 tropical wood species was used to evaluate the performance of the proposed feature extractor. The advantage of the proposed feature extraction technique is that it mimics the experts' interpretation of wood texture, which allows human involvement when analyzing the wood texture. Experimental results show the efficiency of the proposed method.
Keywords: classification, fuzzy, inspection system, image analysis
PDF: https://publications.waset.org/10003749.pdf (Downloads: 1756)

1617. Target Detection with Improved Image Texture Feature Coding Method and Support Vector Machine
Authors: R. Xu, X. Zhao, X. Li, C. Kwan, C.-I Chang
Abstract: An image texture analysis and target recognition approach using an improved image texture feature coding method (TFCM) and a Support Vector Machine (SVM) for target detection is presented. With the proposed target detection framework, targets of interest can be detected accurately. A cascade-sliding-window technique was also developed for automated target localization. Application to mammograms showed that over 88% of normal mammograms and 80% of abnormal mammograms can be correctly identified. The approach was also successfully applied to Synthetic Aperture Radar (SAR) and Ground Penetrating Radar (GPR) images for target detection.
Keywords: image texture analysis, feature extraction, target detection, pattern classification
PDF: https://publications.waset.org/6253.pdf (Downloads: 1785)
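A rough sketch of the detection framework described in the abstract above: texture features are computed per window and scored by an SVM while the window slides over the image. Generic GLCM statistics stand in here for the paper's improved TFCM features, which are not specified on this page; the window size, stride, threshold and training data are assumptions.

```python
# Sliding-window target detection with texture features and an SVM score.
import numpy as np
from skimage.feature import graycomatrix, graycoprops   # skimage >= 0.19 naming assumed
from sklearn.svm import SVC

def window_features(patch):
    """GLCM statistics of one window; patch is a 2-D uint8 grayscale array."""
    glcm = graycomatrix(patch, distances=[1], angles=[0, np.pi / 2],
                        levels=256, symmetric=True, normed=True)
    return np.hstack([graycoprops(glcm, p).ravel()
                      for p in ("contrast", "energy", "homogeneity", "correlation")])

def sliding_window_detect(img, clf, win=64, stride=16, thresh=0.0):
    """Return (row, col, score) for windows whose SVM decision value exceeds thresh."""
    hits = []
    for i in range(0, img.shape[0] - win + 1, stride):
        for j in range(0, img.shape[1] - win + 1, stride):
            s = clf.decision_function([window_features(img[i:i + win, j:j + win])])[0]
            if s > thresh:
                hits.append((i, j, s))
    return hits

# Assumed training data: features of target vs. background windows.
# clf = SVC(kernel="rbf").fit(train_features, train_labels)
```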
1616. Fuzzy Based Visual Texture Feature for Psoriasis Image Analysis
Authors: G. Murugeswari, A. Suruliandi
Abstract: This paper proposes a rotation-invariant texture feature based on the roughness property of the image for psoriasis image analysis. In this work, we have applied this feature to image classification and segmentation. The fuzzy concept is employed to overcome the imprecision of roughness. Since the psoriasis lesion is modeled by a rough surface, the feature is extended for calculating the Psoriasis Area Severity Index value. For classification and segmentation, the Nearest Neighbor algorithm is applied. We have obtained promising results for identifying affected lesions by using the roughness index and severity level estimation.
Keywords: fuzzy texture feature, psoriasis, roughness feature, skin disease
PDF: https://publications.waset.org/10000824.pdf (Downloads: 2126)

1615. Evaluation of Robust Feature Descriptors for Texture Classification
Authors: Jia-Hong Lee, Mei-Yi Wu, Hsien-Tsung Kuo
Abstract: Texture is an important characteristic in real and synthetic scenes. Texture analysis plays a critical role in inspecting surfaces and provides important techniques for a variety of applications. Although several descriptors have been presented to extract texture features, object recognition remains a difficult task due to the complex aspects of texture. Recently, many robust and scale-invariant image features such as SIFT, SURF and ORB have been successfully used in image retrieval and object recognition. In this paper, we compare the performance of these feature descriptors for texture classification using k-means clustering. Different classifiers, including k-NN, Naive Bayes, back-propagation neural network, decision tree and KStar, were applied to three texture image sets: UIUCTex, KTH-TIPS and Brodatz. Experimental results show that SIFT achieves the best average accuracy on the UIUCTex and KTH-TIPS sets, while SURF performs best on the Brodatz set. Among all the classifiers used, the back-propagation neural network works best on the test sets.
Keywords: texture classification, texture descriptor, SIFT, SURF, ORB
PDF: https://publications.waset.org/10003623.pdf (Downloads: 1630)
1614. Frame Texture Classification Method (FTCM) Applied on Mammograms for Detection of Abnormalities
Authors: Kjersti Engan, Karl Skretting, Jostein Herredsvela, Thor Ole Gulsrud
Abstract: Texture classification is an important image processing task with a broad application range. Many different techniques for texture classification have been explored. Using sparse approximation as a feature extraction method for texture classification is a relatively new approach, and Skretting et al. recently presented the Frame Texture Classification Method (FTCM), showing very good results on classical texture images. As an extension of that work, the FTCM is here tested on a real-world application: detection of abnormalities in mammograms. Some extensions to the original FTCM that are useful in certain applications are implemented: two different smoothing techniques and a vector augmentation technique. Both detection of microcalcifications (as a primary detection technique and as the last stage of a detection scheme) and detection of soft-tissue lesions in mammograms are explored. All the results are interesting, and especially the results of using FTCM on regions of interest as the last stage in a detection scheme for microcalcifications are promising.
Keywords: detection, mammogram, texture classification, dictionary learning, FTCM
PDF: https://publications.waset.org/9112.pdf (Downloads: 1407)
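A rough analogue, not the authors' code, of the sparse-approximation idea behind FTCM as described above: learn one dictionary ("frame") per texture class from training patches, then label a test patch by the class whose dictionary gives the smallest approximation error. The use of scikit-learn's dictionary learner, the patch vectorisation and the parameters are all assumptions.

```python
# Class-wise dictionary learning and classification by reconstruction error.
import numpy as np
from sklearn.decomposition import MiniBatchDictionaryLearning

def learn_class_dicts(patches_by_class, n_atoms=64):
    """patches_by_class: {label: (n_patches, patch_dim) array of vectorised patches}."""
    return {c: MiniBatchDictionaryLearning(n_components=n_atoms,
                                           transform_algorithm="omp",
                                           transform_n_nonzero_coefs=4).fit(P)
            for c, P in patches_by_class.items()}

def classify_patch(x, dicts):
    """Assign the vectorised patch x to the class with the lowest sparse-approximation error."""
    errs = {}
    for c, d in dicts.items():
        code = d.transform(x[None, :])                    # sparse coefficients for this class
        errs[c] = np.linalg.norm(x - code @ d.components_)
    return min(errs, key=errs.get)
```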
href="https://publications.waset.org/9112/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/9112.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1407</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1613</span> Unsupervised Texture Segmentation via Applying Geodesic Active Regions to Gaborian Feature Space</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Yuan%20He">Yuan He</a>, <a href="https://publications.waset.org/search?q=Yupin%20Luo"> Yupin Luo</a>, <a href="https://publications.waset.org/search?q=Dongcheng%20Hu"> Dongcheng Hu</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>In this paper, we propose a novel variational method for unsupervised texture segmentation. We use a Gabor filter bank to extract texture features. Some of the filtered channels form a multidimensional Gaborian feature space. To avoid deforming contours directly in a vector-valued space we use a Gaussian mixture model to describe the statistical distribution of this space and get the boundary and region probabilities. Then a framework of geodesic active regions is applied based on them. In the end, experimental results are presented, and show that this method can obtain satisfied boundaries between different texture regions.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Texture%20segmentation" title="Texture segmentation">Texture segmentation</a>, <a href="https://publications.waset.org/search?q=Gabor%20filter" title=" Gabor filter"> Gabor filter</a>, <a href="https://publications.waset.org/search?q=snakes" title=" snakes"> snakes</a>, <a href="https://publications.waset.org/search?q=Geodesicactive%20regions" title=" Geodesicactive regions"> Geodesicactive regions</a> </p> <a href="https://publications.waset.org/5071/unsupervised-texture-segmentation-via-applying-geodesic-active-regions-to-gaborian-feature-space" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/5071/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/5071/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/5071/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/5071/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/5071/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/5071/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/5071/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/5071/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/5071/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/5071/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a 
href="https://publications.waset.org/5071.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1782</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1612</span> Variance Based Component Analysis for Texture Segmentation</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Zeinab%20Ghasemi">Zeinab Ghasemi</a>, <a href="https://publications.waset.org/search?q=S.%20Amirhassan%20Monadjemi"> S. Amirhassan Monadjemi</a>, <a href="https://publications.waset.org/search?q=Abbas%20Vafaei"> Abbas Vafaei</a> </p> <p class="card-text"><strong>Abstract:</strong></p> This paper presents a comparative analysis of a new unsupervised PCA-based technique for steel plates texture segmentation towards defect detection. The proposed scheme called Variance Based Component Analysis or VBCA employs PCA for feature extraction, applies a feature reduction algorithm based on variance of eigenpictures and classifies the pixels as defective and normal. While the classic PCA uses a clusterer like Kmeans for pixel clustering, VBCA employs thresholding and some post processing operations to label pixels as defective and normal. The experimental results show that proposed algorithm called VBCA is 12.46% more accurate and 78.85% faster than the classic PCA. <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Principal%20Component%20Analysis%3B%20Variance%20Based%20Component%0AAnalysis%3B%20Defect%20Detection%3B%20Texture%20Segmentation." title="Principal Component Analysis; Variance Based Component Analysis; Defect Detection; Texture Segmentation.">Principal Component Analysis; Variance Based Component Analysis; Defect Detection; Texture Segmentation.</a> </p> <a href="https://publications.waset.org/14101/variance-based-component-analysis-for-texture-segmentation" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/14101/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/14101/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/14101/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/14101/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/14101/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/14101/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/14101/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/14101/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/14101/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/14101/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/14101.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right 
rounded"> Downloads <span class="badge badge-light">1989</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1611</span> Hybrid Color-Texture Space for Image Classification</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Hassan%20El%20Maia">Hassan El Maia</a>, <a href="https://publications.waset.org/search?q=Ahmed%20Hammouch"> Ahmed Hammouch</a>, <a href="https://publications.waset.org/search?q=Driss%20Aboutajdine"> Driss Aboutajdine</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>This work presents an approach for the construction of a hybrid color-texture space by using mutual information. Feature extraction is done by the Laws filter with SVM (Support Vectors Machine) as a classifier. The classification is applied on the VisTex database and a SPOT HRV (XS) image representing two forest areas in the region of Rabat in Morocco. The result of classification obtained in the hybrid space is compared with the one obtained in the RGB color space.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Color" title="Color">Color</a>, <a href="https://publications.waset.org/search?q=texture" title=" texture"> texture</a>, <a href="https://publications.waset.org/search?q=laws%20filter" title=" laws filter"> laws filter</a>, <a href="https://publications.waset.org/search?q=mutual%20information" title=" mutual information"> mutual information</a>, <a href="https://publications.waset.org/search?q=SVM" title=" SVM"> SVM</a>, <a href="https://publications.waset.org/search?q=hybrid%20space." title=" hybrid space."> hybrid space.</a> </p> <a href="https://publications.waset.org/8321/hybrid-color-texture-space-for-image-classification" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/8321/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/8321/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/8321/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/8321/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/8321/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/8321/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/8321/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/8321/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/8321/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/8321/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/8321.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1837</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge 
badge-info">1610</span> A Multiresolution Approach for Noised Texture Classification based on the Co-occurrence Matrix and First Order Statistics</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=M.%20Ben%20Othmen">M. Ben Othmen</a>, <a href="https://publications.waset.org/search?q=M.%20Sayadi"> M. Sayadi</a>, <a href="https://publications.waset.org/search?q=F.%20Fnaiech"> F. Fnaiech</a> </p> <p class="card-text"><strong>Abstract:</strong></p> Wavelet transform provides several important characteristics which can be used in a texture analysis and classification. In this work, an efficient texture classification method, which combines concepts from wavelet and co-occurrence matrices, is presented. An Euclidian distance classifier is used to evaluate the various methods of classification. A comparative study is essential to determine the ideal method. Using this conjecture, we developed a novel feature set for texture classification and demonstrate its effectiveness <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Classification" title="Classification">Classification</a>, <a href="https://publications.waset.org/search?q=Wavelet" title=" Wavelet"> Wavelet</a>, <a href="https://publications.waset.org/search?q=Co-occurrence" title=" Co-occurrence"> Co-occurrence</a>, <a href="https://publications.waset.org/search?q=Euclidian%0ADistance" title=" Euclidian Distance"> Euclidian Distance</a>, <a href="https://publications.waset.org/search?q=Classifier" title=" Classifier"> Classifier</a>, <a href="https://publications.waset.org/search?q=Texture." title=" Texture."> Texture.</a> </p> <a href="https://publications.waset.org/4636/a-multiresolution-approach-for-noised-texture-classification-based-on-the-co-occurrence-matrix-and-first-order-statistics" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/4636/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/4636/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/4636/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/4636/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/4636/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/4636/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/4636/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/4636/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/4636/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/4636/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/4636.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1493</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge 
badge-info">1609</span> Standard Deviation of Mean and Variance of Rows and Columns of Images for CBIR</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=H.%20B.%20Kekre">H. B. Kekre</a>, <a href="https://publications.waset.org/search?q=Kavita%20Patil"> Kavita Patil</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>This paper describes a novel and effective approach to content-based image retrieval (CBIR) that represents each image in the database by a vector of feature values called “Standard deviation of mean vectors of color distribution of rows and columns of images for CBIR". In many areas of commerce, government, academia, and hospitals, large collections of digital images are being created. This paper describes the approach that uses contents as feature vector for retrieval of similar images. There are several classes of features that are used to specify queries: colour, texture, shape, spatial layout. Colour features are often easily obtained directly from the pixel intensities. In this paper feature extraction is done for the texture descriptor that is 'variance' and 'Variance of Variances'. First standard deviation of each row and column mean is calculated for R, G, and B planes. These six values are obtained for one image which acts as a feature vector. Secondly we calculate variance of the row and column of R, G and B planes of an image. Then six standard deviations of these variance sequences are calculated to form a feature vector of dimension six. We applied our approach to a database of 300 BMP images. We have determined the capability of automatic indexing by analyzing image content: color and texture as features and by applying a similarity measure Euclidean distance.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Standard%20deviation%20Image%20retrieval" title="Standard deviation Image retrieval">Standard deviation Image retrieval</a>, <a href="https://publications.waset.org/search?q=color%0D%0Adistribution" title=" color distribution"> color distribution</a>, <a href="https://publications.waset.org/search?q=Variance" title=" Variance"> Variance</a>, <a href="https://publications.waset.org/search?q=Variance%20of%20Variance" title=" Variance of Variance"> Variance of Variance</a>, <a href="https://publications.waset.org/search?q=Euclidean%20distance." 
title=" Euclidean distance."> Euclidean distance.</a> </p> <a href="https://publications.waset.org/6125/standard-deviation-of-mean-and-variance-of-rows-and-columns-of-images-for-cbir" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/6125/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/6125/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/6125/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/6125/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/6125/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/6125/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/6125/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/6125/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/6125/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/6125/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/6125.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">3755</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1608</span> Study of Features for Hand-printed Recognition</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Satish%20Kumar">Satish Kumar</a> </p> <p class="card-text"><strong>Abstract:</strong></p> The feature extraction method(s) used to recognize hand-printed characters play an important role in ICR applications. In order to achieve high recognition rate for a recognition system, the choice of a feature that suits for the given script is certainly an important task. Even if a new feature required to be designed for a given script, it is essential to know the recognition ability of the existing features for that script. Devanagari script is being used in various Indian languages besides Hindi the mother tongue of majority of Indians. This research examines a variety of feature extraction approaches, which have been used in various ICR/OCR applications, in context to Devanagari hand-printed script. The study is conducted theoretically and experimentally on more that 10 feature extraction methods. The various feature extraction methods have been evaluated on Devanagari hand-printed database comprising more than 25000 characters belonging to 43 alphabets. The recognition ability of the features have been evaluated using three classifiers i.e. k-NN, MLP and SVM. 
<p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Features" title="Features">Features</a>, <a href="https://publications.waset.org/search?q=Hand-printed" title=" Hand-printed"> Hand-printed</a>, <a href="https://publications.waset.org/search?q=Devanagari" title=" Devanagari"> Devanagari</a>, <a href="https://publications.waset.org/search?q=Classifier" title=" Classifier"> Classifier</a>, <a href="https://publications.waset.org/search?q=Database" title=" Database"> Database</a> </p> <a href="https://publications.waset.org/8497/study-of-features-for-hand-printed-recognition" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/8497/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/8497/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/8497/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/8497/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/8497/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/8497/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/8497/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/8497/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/8497/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/8497/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/8497.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1741</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1607</span> Development of a Semantic Wiki-based Feature Library for the Extraction of Manufacturing Feature and Manufacturing Information</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Hendry%20Muljadi">Hendry Muljadi</a>, <a href="https://publications.waset.org/search?q=Hideaki%20Takeda"> Hideaki Takeda</a>, <a href="https://publications.waset.org/search?q=Koichi%20Ando"> Koichi Ando</a> </p> <p class="card-text"><strong>Abstract:</strong></p> A manufacturing feature can be defined simply as a geometric shape and its manufacturing information to create the shape. In a feature-based process planning system, feature library that consists of pre-defined manufacturing features and the manufacturing information to create the shape of the features, plays an important role in the extraction of manufacturing features with their proper manufacturing information. However, to manage the manufacturing information flexibly, it is important to build a feature library that can be easily modified. In this paper, the implementation of Semantic Wiki for the development of the feature library is proposed. 
<p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Manufacturing%20feature" title="Manufacturing feature">Manufacturing feature</a>, <a href="https://publications.waset.org/search?q=feature%20library" title=" feature library"> feature library</a>, <a href="https://publications.waset.org/search?q=feature%0Aontology" title=" feature ontology"> feature ontology</a>, <a href="https://publications.waset.org/search?q=process%20planning" title=" process planning"> process planning</a>, <a href="https://publications.waset.org/search?q=Wiki" title=" Wiki"> Wiki</a>, <a href="https://publications.waset.org/search?q=MediaWiki" title=" MediaWiki"> MediaWiki</a>, <a href="https://publications.waset.org/search?q=Semantic%20Wiki." title=" Semantic Wiki."> Semantic Wiki.</a> </p> <a href="https://publications.waset.org/9813/development-of-a-semantic-wiki-based-feature-library-for-the-extraction-of-manufacturing-feature-and-manufacturing-information" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/9813/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/9813/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/9813/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/9813/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/9813/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/9813/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/9813/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/9813/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/9813/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/9813/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/9813.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1450</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1606</span> Texture Feature-Based Language Identification Using Wavelet-Domain BDIP and BVLC Features and FFT Feature</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Ick%20Hoon%20Jang">Ick Hoon Jang</a>, <a href="https://publications.waset.org/search?q=Hoon%20Jae%20Lee"> Hoon Jae Lee</a>, <a href="https://publications.waset.org/search?q=Dae%20Hoon%20Kwon"> Dae Hoon Kwon</a>, <a href="https://publications.waset.org/search?q=Ui%20Young%20Pak"> Ui Young Pak</a> </p> <p class="card-text"><strong>Abstract:</strong></p> In this paper, we propose a texture feature-based language identification using wavelet-domain BDIP (block difference of inverse probabilities) and BVLC (block variance of local correlation coefficients) features and FFT (fast Fourier transform) feature. 
In the proposed method, wavelet subbands are first obtained by wavelet transform from a test image and denoised by Donoho's soft-thresholding. BDIP and BVLC operators are next applied to the wavelet subbands. FFT blocks are also obtained by 2D (two-dimensional) FFT from the blocks into which the test image is partitioned. Some significant FFT coefficients in each block are selected and the magnitude operator is applied to them. Moments for each subband of BDIP and BVLC and for each magnitude of the significant FFT coefficients are then computed and fused into a feature vector. In classification, a stabilized Bayesian classifier, which adopts variance thresholding, searches for the training feature vector most similar to the test feature vector. Experimental results show that the proposed method with the three operations yields excellent language identification even with a rather low feature dimension. <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=BDIP" title="BDIP">BDIP</a>, <a href="https://publications.waset.org/search?q=BVLC" title=" BVLC"> BVLC</a>, <a href="https://publications.waset.org/search?q=FFT" title=" FFT"> FFT</a>, <a href="https://publications.waset.org/search?q=language%20identification" title=" language identification"> language identification</a>, <a href="https://publications.waset.org/search?q=texture%0Afeature" title=" texture feature"> texture feature</a>, <a href="https://publications.waset.org/search?q=wavelet%20transform." title=" wavelet transform."> wavelet transform.</a> </p> <a href="https://publications.waset.org/1363/texture-feature-based-language-identification-using-wavelet-domain-bdip-and-bvlc-features-and-fft-feature" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/1363/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/1363/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/1363/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/1363/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/1363/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/1363/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/1363/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/1363/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/1363/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/1363/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/1363.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">2160</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1605</span> Myanmar Character Recognition Using Eight Direction Chain Code Frequency Features </h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a 
href="https://publications.waset.org/search?q=Kyi%20Pyar%20Zaw">Kyi Pyar Zaw</a>, <a href="https://publications.waset.org/search?q=Zin%20Mar%20Kyu"> Zin Mar Kyu </a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>Character recognition is the process of converting a text image file into editable and searchable text file. Feature Extraction is the heart of any character recognition system. The character recognition rate may be low or high depending on the extracted features. In the proposed paper, 25 features for one character are used in character recognition. Basically, there are three steps of character recognition such as character segmentation, feature extraction and classification. In segmentation step, horizontal cropping method is used for line segmentation and vertical cropping method is used for character segmentation. In the Feature extraction step, features are extracted in two ways. The first way is that the 8 features are extracted from the entire input character using eight direction chain code frequency extraction. The second way is that the input character is divided into 16 blocks. For each block, although 8 feature values are obtained through eight-direction chain code frequency extraction method, we define the sum of these 8 feature values as a feature for one block. Therefore, 16 features are extracted from that 16 blocks in the second way. We use the number of holes feature to cluster the similar characters. We can recognize the almost Myanmar common characters with various font sizes by using these features. All these 25 features are used in both training part and testing part. In the classification step, the characters are classified by matching the all features of input character with already trained features of characters.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Chain%20code%20frequency" title="Chain code frequency">Chain code frequency</a>, <a href="https://publications.waset.org/search?q=character%20recognition" title=" character recognition"> character recognition</a>, <a href="https://publications.waset.org/search?q=feature%20extraction" title=" feature extraction"> feature extraction</a>, <a href="https://publications.waset.org/search?q=features%20matching" title=" features matching"> features matching</a>, <a href="https://publications.waset.org/search?q=segmentation." 
title=" segmentation."> segmentation.</a> </p> <a href="https://publications.waset.org/10009080/myanmar-character-recognition-using-eight-direction-chain-code-frequency-features" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/10009080/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/10009080/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/10009080/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/10009080/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/10009080/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/10009080/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/10009080/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/10009080/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/10009080/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/10009080/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/10009080.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">767</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1604</span> An Optimal Feature Subset Selection for Leaf Analysis</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=N.%20Valliammal">N. Valliammal</a>, <a href="https://publications.waset.org/search?q=S.N.%20Geethalakshmi"> S.N. Geethalakshmi</a> </p> <p class="card-text"><strong>Abstract:</strong></p> This paper describes an optimal approach for feature subset selection to classify the leaves based on Genetic Algorithm (GA) and Kernel Based Principle Component Analysis (KPCA). Due to high complexity in the selection of the optimal features, the classification has become a critical task to analyse the leaf image data. Initially the shape, texture and colour features are extracted from the leaf images. These extracted features are optimized through the separate functioning of GA and KPCA. This approach performs an intersection operation over the subsets obtained from the optimization process. Finally, the most common matching subset is forwarded to train the Support Vector Machine (SVM). Our experimental results successfully prove that the application of GA and KPCA for feature subset selection using SVM as a classifier is computationally effective and improves the accuracy of the classifier. 
<p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Optimization" title="Optimization">Optimization</a>, <a href="https://publications.waset.org/search?q=Feature%20extraction" title=" Feature extraction"> Feature extraction</a>, <a href="https://publications.waset.org/search?q=Feature%20subset" title=" Feature subset"> Feature subset</a>, <a href="https://publications.waset.org/search?q=Classification" title=" Classification"> Classification</a>, <a href="https://publications.waset.org/search?q=GA" title=" GA"> GA</a>, <a href="https://publications.waset.org/search?q=KPCA" title=" KPCA"> KPCA</a>, <a href="https://publications.waset.org/search?q=SVM%20and%20Computation" title=" SVM and Computation"> SVM and Computation</a> </p> <a href="https://publications.waset.org/2535/an-optimal-feature-subset-selection-for-leaf-analysis" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/2535/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/2535/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/2535/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/2535/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/2535/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/2535/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/2535/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/2535/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/2535/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/2535/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/2535.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">2260</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1603</span> Scale-Space Volume Descriptors for Automatic 3D Facial Feature Extraction</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Daniel%20Chen">Daniel Chen</a>, <a href="https://publications.waset.org/search?q=George%20Mamic"> George Mamic</a>, <a href="https://publications.waset.org/search?q=Clinton%20Fookes"> Clinton Fookes</a>, <a href="https://publications.waset.org/search?q=Sridha%20Sridharan"> Sridha Sridharan</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>An automatic method for the extraction of feature points for face based applications is proposed. The system is based upon volumetric feature descriptors, which in this paper has been extended to incorporate scale space. The method is robust to noise and has the ability to extract local and holistic features simultaneously from faces stored in a database. 
Extracted features are stable over a range of faces, with results indicating that in terms of intra-ID variability, the technique has the ability to outperform manual landmarking.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Scale%20space%20volume%20descriptor" title="Scale space volume descriptor">Scale space volume descriptor</a>, <a href="https://publications.waset.org/search?q=feature%20extraction" title=" feature extraction"> feature extraction</a>, <a href="https://publications.waset.org/search?q=3D%20facial%20landmarking" title=" 3D facial landmarking"> 3D facial landmarking</a> </p> <a href="https://publications.waset.org/1712/scale-space-volume-descriptors-for-automatic-3d-facial-feature-extraction" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/1712/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/1712/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/1712/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/1712/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/1712/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/1712/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/1712/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/1712/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/1712/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/1712/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/1712.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1516</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1602</span> Towards Integrating Statistical Color Features for Human Skin Detection</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Mohd%20Zamri%20Osman">Mohd Zamri Osman</a>, <a href="https://publications.waset.org/search?q=Mohd%20Aizaini%20Maarof"> Mohd Aizaini Maarof</a>, <a href="https://publications.waset.org/search?q=Mohd%20Foad%20Rohani"> Mohd Foad Rohani</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>Human skin detection recognized as the primary step in most of the applications such as face detection, illicit image filtering, hand recognition and video surveillance. The performance of any skin detection applications greatly relies on the two components: feature extraction and classification method. Skin color is the most vital information used for skin detection purpose. However, color feature alone sometimes could not handle images with having same color distribution with skin color. 
A pixel-based color feature cannot eliminate skin-like colors, because the intensities of skin and skin-like colors fall under the same distribution. Hence, statistical color measures such as the mean and standard deviation are exploited as additional features to increase the reliability of the skin detector. In this paper, we studied the effectiveness of statistical color features for human skin detection. Furthermore, the paper analyzed the integrated color and texture features using eight classifiers with three color spaces: <em>RGB</em>, <em>YCbCr</em>, and <em>HSV</em>. The experimental results show that integrating the statistical features using a Random Forest classifier achieved a significant performance, with an F1-score of 0.969.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Color%20space" title="Color space">Color space</a>, <a href="https://publications.waset.org/search?q=neural%20network" title=" neural network"> neural network</a>, <a href="https://publications.waset.org/search?q=random%20forest" title=" random forest"> random forest</a>, <a href="https://publications.waset.org/search?q=skin%20detection" title=" skin detection"> skin detection</a>, <a href="https://publications.waset.org/search?q=statistical%20feature." title=" statistical feature."> statistical feature.</a> </p> <a href="https://publications.waset.org/10003677/towards-integrating-statistical-color-features-for-human-skin-detection" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/10003677/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/10003677/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/10003677/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/10003677/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/10003677/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/10003677/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/10003677/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/10003677/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/10003677/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/10003677/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/10003677.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1975</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1601</span> Motion Recognition Based On Fuzzy WP Feature Extraction Approach</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Keun-Chang%20Kwak">Keun-Chang Kwak</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>This paper is concerned with motion recognition based on a fuzzy WP (Wavelet 
Packet) feature extraction approach from Vicon physical data sets. For this purpose, we use an efficient fuzzy mutual-information-based WP transform for feature extraction. This method estimates the required mutual information using a novel approach based on a fuzzy membership function. The physical action data set includes 10 normal and 10 aggressive physical actions that measure human activity. The data have been collected from 10 subjects using the Vicon 3D tracker. The experiments consider running, sitting, and walking as physical activity motions among the various activities. The experimental results revealed that the presented feature extraction approach showed good recognition performance.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Motion%20recognition" title="Motion recognition">Motion recognition</a>, <a href="https://publications.waset.org/search?q=fuzzy%20wavelet%20packet" title=" fuzzy wavelet packet"> fuzzy wavelet packet</a>, <a href="https://publications.waset.org/search?q=Vicon%0D%0Aphysical%20data." title=" Vicon physical data."> Vicon physical data.</a> </p> <a href="https://publications.waset.org/4617/motion-recognition-based-on-fuzzy-wp-feature-extraction-approach" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/4617/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/4617/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/4617/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/4617/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/4617/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/4617/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/4617/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/4617/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/4617/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/4617/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/4617.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1658</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1600</span> Walsh-Hadamard Transform for Facial Feature Extraction in Face Recognition</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=M.%20Hassan">M. Hassan</a>, <a href="https://publications.waset.org/search?q=I.%20Osman"> I. Osman</a>, <a href="https://publications.waset.org/search?q=M.%20Yahia"> M. Yahia</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>This paper proposes a new facial feature extraction approach, the Walsh-Hadamard Transform (WHT). This approach is based on the correlation between local pixels of the face image. 
Its primary advantage is the simplicity of its computation. The paper compares the proposed approach, WHT, which was traditionally used in data compression with two other known approaches: the Principal Component Analysis (PCA) and the Discrete Cosine Transform (DCT) using the face database of Olivetti Research Laboratory (ORL). In spite of its simple computation, the proposed algorithm (WHT) gave very close results to those obtained by the PCA and DCT. This paper initiates the research into WHT and the family of frequency transforms and examines their suitability for feature extraction in face recognition applications.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Face%20Recognition" title="Face Recognition">Face Recognition</a>, <a href="https://publications.waset.org/search?q=Facial%20Feature%20Extraction" title=" Facial Feature Extraction"> Facial Feature Extraction</a>, <a href="https://publications.waset.org/search?q=Principal%20Component%20Analysis" title=" Principal Component Analysis"> Principal Component Analysis</a>, <a href="https://publications.waset.org/search?q=and%20Discrete%20Cosine%20Transform" title=" and Discrete Cosine Transform"> and Discrete Cosine Transform</a>, <a href="https://publications.waset.org/search?q=Wash-Hadamard%20Transform." title=" Wash-Hadamard Transform."> Wash-Hadamard Transform.</a> </p> <a href="https://publications.waset.org/2475/walsh-hadamard-transform-for-facial-feature-extraction-in-face-recognition" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/2475/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/2475/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/2475/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/2475/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/2475/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/2475/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/2475/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/2475/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/2475/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/2475/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/2475.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">2578</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1599</span> Feature Reduction of Nearest Neighbor Classifiers using Genetic Algorithm</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=M.%20Analoui">M. Analoui</a>, <a href="https://publications.waset.org/search?q=M.%20Fadavi%20Amiri"> M. 
Fadavi Amiri</a> </p> <p class="card-text"><strong>Abstract:</strong></p> The design of a pattern classifier includes an attempt to select, among a set of possible features, a minimum subset of weakly correlated features that better discriminate the pattern classes. This is usually a difficult task in practice, normally requiring the application of heuristic knowledge about the specific problem domain. The selection and quality of the features representing each pattern have a considerable bearing on the success of subsequent pattern classification. Feature extraction is the process of deriving new features from the original features in order to reduce the cost of feature measurement, increase classifier efficiency, and allow higher classification accuracy. Many current feature extraction techniques involve linear transformations of the original pattern vectors to new vectors of lower dimensionality. While this is useful for data visualization and increasing classification efficiency, it does not necessarily reduce the number of features that must be measured since each new feature may be a linear combination of all of the features in the original pattern vector. In this paper a new approach is presented to feature extraction in which feature selection, feature extraction, and classifier training are performed simultaneously using a genetic algorithm. In this approach each feature value is first normalized by a linear equation, then scaled by the associated weight prior to training, testing, and classification. A knn classifier is used to evaluate each set of feature weights. The genetic algorithm optimizes a vector of feature weights, which are used to scale the individual features in the original pattern vectors in either a linear or a nonlinear fashion. By this approach, the number of features used in classifying can be finely reduced. <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Feature%20reduction" title="Feature reduction">Feature reduction</a>, <a href="https://publications.waset.org/search?q=genetic%20algorithm" title=" genetic algorithm"> genetic algorithm</a>, <a href="https://publications.waset.org/search?q=pattern%0Aclassification" title=" pattern classification"> pattern classification</a>, <a href="https://publications.waset.org/search?q=nearest%20neighbor%20rule%20classifiers%20%28k-NNR%29." 
title=" nearest neighbor rule classifiers (k-NNR)."> nearest neighbor rule classifiers (k-NNR).</a> </p> <a href="https://publications.waset.org/6432/feature-reduction-of-nearest-neighbor-classifiers-using-genetic-algorithm" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/6432/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/6432/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/6432/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/6432/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/6432/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/6432/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/6432/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/6432/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/6432/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/6432/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/6432.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1780</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1598</span> Investigation on Feature Extraction and Classification of Medical Images</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=P.%20Gnanasekar">P. Gnanasekar</a>, <a href="https://publications.waset.org/search?q=A.%20Nagappan"> A. Nagappan</a>, <a href="https://publications.waset.org/search?q=S.%20Sharavanan"> S. Sharavanan</a>, <a href="https://publications.waset.org/search?q=O.%20Saravanan"> O. Saravanan</a>, <a href="https://publications.waset.org/search?q=D.%20Vinodkumar"> D. Vinodkumar</a>, <a href="https://publications.waset.org/search?q=T.%20Elayabharathi"> T. Elayabharathi</a>, <a href="https://publications.waset.org/search?q=G.%20Karthik"> G. Karthik</a> </p> <p class="card-text"><strong>Abstract:</strong></p> In this paper we present the deep study about the Bio- Medical Images and tag it with some basic extracting features (e.g. color, pixel value etc). The classification is done by using a nearest neighbor classifier with various distance measures as well as the automatic combination of classifier results. This process selects a subset of relevant features from a group of features of the image. It also helps to acquire better understanding about the image by describing which the important features are. The accuracy can be improved by increasing the number of features selected. Various types of classifications were evolved for the medical images like Support Vector Machine (SVM) which is used for classifying the Bacterial types. Ant Colony Optimization method is used for optimal results. 
It has high approximation capability and much faster convergence, Texture feature extraction method based on Gabor wavelets etc.. <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=ACO%20Ant%20Colony%20Optimization" title="ACO Ant Colony Optimization">ACO Ant Colony Optimization</a>, <a href="https://publications.waset.org/search?q=Correlogram" title=" Correlogram"> Correlogram</a>, <a href="https://publications.waset.org/search?q=CCM%0ACo-Occurrence%20Matrix" title=" CCM Co-Occurrence Matrix"> CCM Co-Occurrence Matrix</a>, <a href="https://publications.waset.org/search?q=RTS%20Rough-Set%20theory" title=" RTS Rough-Set theory"> RTS Rough-Set theory</a> </p> <a href="https://publications.waset.org/11279/investigation-on-feature-extraction-and-classification-of-medical-images" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/11279/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/11279/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/11279/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/11279/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/11279/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/11279/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/11279/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/11279/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/11279/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/11279/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/11279.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">3025</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1597</span> Texture Feature Extraction of Infrared River Ice Images using Second-Order Spatial Statistics</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Bharathi%20P.%20T">Bharathi P. T</a>, <a href="https://publications.waset.org/search?q=P.%20Subashini"> P. Subashini</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>Ice cover County has a significant impact on rivers as it affects with the ice melting capacity which results in flooding, restrict navigation, modify the ecosystem and microclimate. River ices are made up of different ice types with varying ice thickness, so surveillance of river ice plays an important role. River ice types are captured using infrared imaging camera which captures the images even during the night times. In this paper the river ice infrared texture images are analysed using first-order statistical methods and secondorder statistical methods. 
The second order statistical methods considered are spatial gray level dependence method, gray level run length method and gray level difference method. The performance of the feature extraction methods are evaluated by using Probabilistic Neural Network classifier and it is found that the first-order statistical method and second-order statistical method yields low accuracy. So the features extracted from the first-order statistical method and second-order statistical method are combined and it is observed that the result of these combined features (First order statistical method + gray level run length method) provides higher accuracy when compared with the features from the first-order statistical method and second-order statistical method alone.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Gray%20Level%20Difference%20Method" title="Gray Level Difference Method">Gray Level Difference Method</a>, <a href="https://publications.waset.org/search?q=Gray%20Level%20Run%0D%0ALength%20Method" title=" Gray Level Run Length Method"> Gray Level Run Length Method</a>, <a href="https://publications.waset.org/search?q=Kurtosis" title=" Kurtosis"> Kurtosis</a>, <a href="https://publications.waset.org/search?q=Probabilistic%20Neural%20Network" title=" Probabilistic Neural Network"> Probabilistic Neural Network</a>, <a href="https://publications.waset.org/search?q=Skewness" title=" Skewness"> Skewness</a>, <a href="https://publications.waset.org/search?q=Spatial%20Gray%20Level%20Dependence%20Method." title=" Spatial Gray Level Dependence Method."> Spatial Gray Level Dependence Method.</a> </p> <a href="https://publications.waset.org/14822/texture-feature-extraction-of-infrared-river-ice-images-using-second-order-spatial-statistics" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/14822/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/14822/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/14822/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/14822/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/14822/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/14822/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/14822/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/14822/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/14822/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/14822/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/14822.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">2925</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1596</span> Development of a Wiki-based Feature Library for a Process Planning 
System</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Hendry%20Muljadi">Hendry Muljadi</a>, <a href="https://publications.waset.org/search?q=Hideaki%20Takeda"> Hideaki Takeda</a>, <a href="https://publications.waset.org/search?q=Koichi%20Ando"> Koichi Ando</a> </p> <p class="card-text"><strong>Abstract:</strong></p> A manufacturing feature can be defined simply as a geometric shape and its manufacturing information to create the shape. In a feature-based process planning system, feature library plays an important role in the extraction of manufacturing features with their proper manufacturing information. However, to manage the manufacturing information flexibly, it is important to build a feature library that is easy to modify. In this paper, a Wiki-based feature library is proposed. <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Manufacturing%20feature" title="Manufacturing feature">Manufacturing feature</a>, <a href="https://publications.waset.org/search?q=feature%20library" title=" feature library"> feature library</a>, <a href="https://publications.waset.org/search?q=feature%0Aontology" title=" feature ontology"> feature ontology</a>, <a href="https://publications.waset.org/search?q=process%20planning" title=" process planning"> process planning</a>, <a href="https://publications.waset.org/search?q=Wiki" title=" Wiki"> Wiki</a>, <a href="https://publications.waset.org/search?q=MediaWiki." title=" MediaWiki."> MediaWiki.</a> </p> <a href="https://publications.waset.org/11944/development-of-a-wiki-based-feature-library-for-a-process-planning-system" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/11944/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/11944/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/11944/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/11944/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/11944/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/11944/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/11944/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/11944/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/11944/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/11944/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/11944.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1431</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1595</span> Wavelet and K-L Seperability Based Feature Extraction Method for Functional Data Classification</h5> <div class="card-body"> <p 
class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Jun%20Wan">Jun Wan</a>, <a href="https://publications.waset.org/search?q=Zehua%20Chen"> Zehua Chen</a>, <a href="https://publications.waset.org/search?q=Yingwu%20Chen"> Yingwu Chen</a>, <a href="https://publications.waset.org/search?q=Zhidong%20Bai"> Zhidong Bai</a> </p> <p class="card-text"><strong>Abstract:</strong></p> This paper proposes a novel feature extraction method, based on Discrete Wavelet Transform (DWT) and K-L Seperability (KLS), for the classification of Functional Data (FD). This method combines the decorrelation and reduction property of DWT and the additive independence property of KLS, which is helpful to extraction classification features of FD. It is an advanced approach of the popular wavelet based shrinkage method for functional data reduction and classification. A theory analysis is given in the paper to prove the consistent convergence property, and a simulation study is also done to compare the proposed method with the former shrinkage ones. The experiment results show that this method has advantages in improving classification efficiency, precision and robustness. <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=classification" title="classification">classification</a>, <a href="https://publications.waset.org/search?q=functional%20data" title=" functional data"> functional data</a>, <a href="https://publications.waset.org/search?q=feature%20extraction" title=" feature extraction"> feature extraction</a>, <a href="https://publications.waset.org/search?q=K-Lseperability" title=" K-Lseperability"> K-Lseperability</a>, <a href="https://publications.waset.org/search?q=wavelet." title=" wavelet."> wavelet.</a> </p> <a href="https://publications.waset.org/6083/wavelet-and-k-l-seperability-based-feature-extraction-method-for-functional-data-classification" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/6083/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/6083/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/6083/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/6083/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/6083/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/6083/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/6083/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/6083/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/6083/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/6083/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/6083.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">1475</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" 
style="font-size:.9rem"><span class="badge badge-info">1594</span> An Automatic Feature Extraction Technique for 2D Punch Shapes</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Awais%20Ahmad%20Khan">Awais Ahmad Khan</a>, <a href="https://publications.waset.org/search?q=Emad%20Abouel%20Nasr"> Emad Abouel Nasr</a>, <a href="https://publications.waset.org/search?q=H.%20M.%20A.%20Hussein"> H. M. A. Hussein</a>, <a href="https://publications.waset.org/search?q=Abdulrahman%20Al-Ahmari"> Abdulrahman Al-Ahmari</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>Sheet-metal parts have been widely applied in electronics, communication and mechanical industries in recent decades; but the advancement in sheet-metal part design and manufacturing is still behind in comparison with the increasing importance of sheet-metal parts in modern industry. This paper presents a methodology for automatic extraction of some common 2D internal sheet metal features. The features used in this study are taken from Unipunch ™ catalogue. The extraction process starts with the data extraction from STEP file using an object oriented approach and with the application of suitable algorithms and rules, all features contained in the catalogue are automatically extracted. Since the extracted features include geometry and engineering information, they will be effective for downstream application such as feature rebuilding and process planning.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Feature%20Extraction" title="Feature Extraction">Feature Extraction</a>, <a href="https://publications.waset.org/search?q=Internal%20Features" title=" Internal Features"> Internal Features</a>, <a href="https://publications.waset.org/search?q=Punch%20Shapes" title=" Punch Shapes"> Punch Shapes</a>, <a href="https://publications.waset.org/search?q=Sheet%20metal" title=" Sheet metal"> Sheet metal</a>, <a href="https://publications.waset.org/search?q=STEP." 
title=" STEP."> STEP.</a> </p> <a href="https://publications.waset.org/10004369/an-automatic-feature-extraction-technique-for-2d-punch-shapes" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/10004369/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/10004369/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/10004369/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/10004369/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/10004369/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/10004369/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/10004369/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/10004369/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/10004369/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/10004369/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/10004369.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">2104</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1593</span> Automatic Extraction of Water Bodies Using Whole-R Method</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Nikhat%20Nawaz">Nikhat Nawaz</a>, <a href="https://publications.waset.org/search?q=S.%20Srinivasulu"> S. Srinivasulu</a>, <a href="https://publications.waset.org/search?q=P.%20Kesava%20Rao"> P. Kesava Rao</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>Feature extraction plays an important role in many remote sensing applications. Automatic extraction of water bodies is of great significance in many remote sensing applications like change detection, image retrieval etc. This paper presents a procedure for automatic extraction of water information from remote sensing images. The algorithm uses the relative location of R color component of the chromaticity diagram. This method is then integrated with the effectiveness of the spatial scale transformation of whole method. The whole method is based on water index fitted from spectral library. 
Experimental results demonstrate the improved accuracy and effectiveness of the integrated method for automatic extraction of water bodies.</p> <p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Chromaticity" title="Chromaticity">Chromaticity</a>, <a href="https://publications.waset.org/search?q=Feature%20Extraction" title=" Feature Extraction"> Feature Extraction</a>, <a href="https://publications.waset.org/search?q=Remote%20Sensing" title=" Remote Sensing"> Remote Sensing</a>, <a href="https://publications.waset.org/search?q=Spectral%20library" title=" Spectral library"> Spectral library</a>, <a href="https://publications.waset.org/search?q=Water%20Index." title=" Water Index."> Water Index.</a> </p> <a href="https://publications.waset.org/9996774/automatic-extraction-of-water-bodies-using-whole-r-method" class="btn btn-primary btn-sm">Procedia</a> <a href="https://publications.waset.org/9996774/apa" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">APA</a> <a href="https://publications.waset.org/9996774/bibtex" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">BibTeX</a> <a href="https://publications.waset.org/9996774/chicago" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Chicago</a> <a href="https://publications.waset.org/9996774/endnote" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">EndNote</a> <a href="https://publications.waset.org/9996774/harvard" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">Harvard</a> <a href="https://publications.waset.org/9996774/json" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">JSON</a> <a href="https://publications.waset.org/9996774/mla" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">MLA</a> <a href="https://publications.waset.org/9996774/ris" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">RIS</a> <a href="https://publications.waset.org/9996774/xml" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">XML</a> <a href="https://publications.waset.org/9996774/iso690" target="_blank" rel="nofollow" class="btn btn-primary btn-sm">ISO 690</a> <a href="https://publications.waset.org/9996774.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a> <span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">3383</span> </span> </div> </div> <div class="card publication-listing mb-3 mt-3"> <h5 class="card-header" style="font-size:.9rem"><span class="badge badge-info">1592</span> Detecting HCC Tumor in Three Phasic CT Liver Images with Optimization of Neural Network</h5> <div class="card-body"> <p class="card-text"><strong>Authors:</strong> <a href="https://publications.waset.org/search?q=Mahdieh%20Khalilinezhad">Mahdieh Khalilinezhad</a>, <a href="https://publications.waset.org/search?q=Silvana%20Dellepiane"> Silvana Dellepiane</a>, <a href="https://publications.waset.org/search?q=Gianni%20Vernazza"> Gianni Vernazza</a> </p> <p class="card-text"><strong>Abstract:</strong></p> <p>The aim of this work is to build a model based on tissue characterization that is able to discriminate pathological and non-pathological regions from three-phasic CT images. With our research and based on a feature selection in different phases, we are trying to design a neural network system with an optimal neuron number in a hidden layer. Our approach consists of three steps: feature selection, feature reduction, and classification. 
For each region of interest (ROI), six distinct sets of texture features are extracted: first-order histogram parameters, absolute gradient, run-length matrix, co-occurrence matrix, autoregressive model, and wavelet features, for a total of 270 texture features. Analyzing the successive phases shows that the injection of the liquid changes the most relevant features in each region. Our results demonstrate that, for most of the features fed to the classification algorithm, phase 3 is the best phase for detecting HCC tumors. With first-order histogram parameters, our method separates the pathological and healthy classes with an accuracy of 85% in phase 1, 95% in phase 2, and 95% in phase 3.</p>
<p class="card-text"><strong>Keywords:</strong> <a href="https://publications.waset.org/search?q=Feature%20selection" title="Feature selection">Feature selection</a>, <a href="https://publications.waset.org/search?q=Multi-phasic%20liver%20images" title=" Multi-phasic liver images"> Multi-phasic liver images</a>, <a href="https://publications.waset.org/search?q=Neural%0D%0Anetwork" title=" Neural network"> Neural network</a>, <a href="https://publications.waset.org/search?q=Texture%20analysis." title=" Texture analysis"> Texture analysis</a> </p>
<a href="https://publications.waset.org/10000812/detecting-hcc-tumor-in-three-phasic-ct-liver-images-with-optimization-of-neural-network" class="btn btn-primary btn-sm">Procedia</a>
<a href="https://publications.waset.org/10000812.pdf" target="_blank" class="btn btn-primary btn-sm">PDF</a>
<span class="bg-info text-light px-1 py-1 float-right rounded"> Downloads <span class="badge badge-light">2546</span> </span>
</div> </div>
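<p><em>Illustrative sketch:</em> the pipeline above extracts several families of texture descriptors per ROI and feeds a reduced feature set to a neural network whose hidden-layer size is tuned. The snippet below sketches only one of the six listed families (co-occurrence matrix features) plus a small scikit-learn MLP; the other descriptor families, the feature-reduction step, and the authors' exact network configuration are not reproduced, and all parameter values shown are assumptions.</p>
<pre><code># Hypothetical sketch: GLCM texture features per ROI + a small MLP classifier.
# This is not the authors' implementation; ROI arrays and labels are placeholders.
import numpy as np
from skimage.feature import graycomatrix, graycoprops
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import cross_val_score

GLCM_PROPS = ("contrast", "homogeneity", "energy", "correlation")

def glcm_features(roi_uint8):
    """Co-occurrence texture features for one grey-level ROI (2-D uint8 array)."""
    glcm = graycomatrix(roi_uint8, distances=[1], angles=[0, np.pi / 2],
                        levels=256, symmetric=True, normed=True)
    return np.hstack([graycoprops(glcm, p).ravel() for p in GLCM_PROPS])

def classify(rois, labels, hidden_neurons=10):
    """Cross-validated accuracy of an MLP on GLCM features; the hidden size is the tuning knob."""
    X = np.vstack([glcm_features(r) for r in rois])
    clf = MLPClassifier(hidden_layer_sizes=(hidden_neurons,), max_iter=2000,
                        random_state=0)
    return cross_val_score(clf, X, labels, cv=5).mean()

# Usage (placeholders): rois = list of 2-D uint8 ROI arrays, labels = 0/1 per ROI
# for n in (5, 10, 20):               # simple search over hidden-layer sizes
#     print(n, classify(rois, labels, hidden_neurons=n))
</code></pre>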
href="https://publications.waset.org/search?q=texture%20feature%20extraction&page=4">4</a></li> <li class="page-item"><a class="page-link" href="https://publications.waset.org/search?q=texture%20feature%20extraction&page=5">5</a></li> <li class="page-item"><a class="page-link" href="https://publications.waset.org/search?q=texture%20feature%20extraction&page=6">6</a></li> <li class="page-item"><a class="page-link" href="https://publications.waset.org/search?q=texture%20feature%20extraction&page=7">7</a></li> <li class="page-item"><a class="page-link" href="https://publications.waset.org/search?q=texture%20feature%20extraction&page=8">8</a></li> <li class="page-item"><a class="page-link" href="https://publications.waset.org/search?q=texture%20feature%20extraction&page=9">9</a></li> <li class="page-item"><a class="page-link" href="https://publications.waset.org/search?q=texture%20feature%20extraction&page=10">10</a></li> <li class="page-item disabled"><span class="page-link">...</span></li> <li class="page-item"><a class="page-link" href="https://publications.waset.org/search?q=texture%20feature%20extraction&page=54">54</a></li> <li class="page-item"><a class="page-link" href="https://publications.waset.org/search?q=texture%20feature%20extraction&page=55">55</a></li> <li class="page-item"><a class="page-link" href="https://publications.waset.org/search?q=texture%20feature%20extraction&page=2" rel="next">›</a></li> </ul> </div> </main> <footer> <div id="infolinks" class="pt-3 pb-2"> <div class="container"> <div style="background-color:#f5f5f5;" class="p-3"> <div class="row"> <div class="col-md-2"> <ul class="list-unstyled"> About <li><a href="https://waset.org/page/support">About Us</a></li> <li><a href="https://waset.org/page/support#legal-information">Legal</a></li> <li><a target="_blank" rel="nofollow" href="https://publications.waset.org/static/files/WASET-16th-foundational-anniversary.pdf">WASET celebrates its 16th foundational anniversary</a></li> </ul> </div> <div class="col-md-2"> <ul class="list-unstyled"> Account <li><a href="https://waset.org/profile">My Account</a></li> </ul> </div> <div class="col-md-2"> <ul class="list-unstyled"> Explore <li><a href="https://waset.org/disciplines">Disciplines</a></li> <li><a href="https://waset.org/conferences">Conferences</a></li> <li><a href="https://waset.org/conference-programs">Conference Program</a></li> <li><a href="https://waset.org/committees">Committees</a></li> <li><a href="https://publications.waset.org">Publications</a></li> </ul> </div> <div class="col-md-2"> <ul class="list-unstyled"> Research <li><a href="https://publications.waset.org/abstracts">Abstracts</a></li> <li><a href="https://publications.waset.org">Periodicals</a></li> <li><a href="https://publications.waset.org/archive">Archive</a></li> </ul> </div> <div class="col-md-2"> <ul class="list-unstyled"> Open Science <li><a target="_blank" rel="nofollow" href="https://publications.waset.org/static/files/Open-Science-Philosophy.pdf">Open Science Philosophy</a></li> <li><a target="_blank" rel="nofollow" href="https://publications.waset.org/static/files/Open-Science-Award.pdf">Open Science Award</a></li> <li><a target="_blank" rel="nofollow" href="https://publications.waset.org/static/files/Open-Society-Open-Science-and-Open-Innovation.pdf">Open Innovation</a></li> <li><a target="_blank" rel="nofollow" href="https://publications.waset.org/static/files/Postdoctoral-Fellowship-Award.pdf">Postdoctoral Fellowship Award</a></li> <li><a target="_blank" rel="nofollow" 
href="https://publications.waset.org/static/files/Scholarly-Research-Review.pdf">Scholarly Research Review</a></li> </ul> </div> <div class="col-md-2"> <ul class="list-unstyled"> Support <li><a href="https://waset.org/page/support">Support</a></li> <li><a href="https://waset.org/profile/messages/create">Contact Us</a></li> <li><a href="https://waset.org/profile/messages/create">Report Abuse</a></li> </ul> </div> </div> </div> </div> </div> <div class="container text-center"> <hr style="margin-top:0;margin-bottom:.3rem;"> <a href="https://creativecommons.org/licenses/by/4.0/" target="_blank" class="text-muted small">Creative Commons Attribution 4.0 International License</a> <div id="copy" class="mt-2">© 2025 World Academy of Science, Engineering and Technology</div> </div> </footer> <a href="javascript:" id="return-to-top"><i class="fas fa-arrow-up"></i></a> <div class="modal" id="modal-template"> <div class="modal-dialog"> <div class="modal-content"> <div class="row m-0 mt-1"> <div class="col-md-12"> <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">×</span></button> </div> </div> <div class="modal-body"></div> </div> </div> </div> <script src="https://cdn.waset.org/static/plugins/jquery-3.3.1.min.js"></script> <script src="https://cdn.waset.org/static/plugins/bootstrap-4.2.1/js/bootstrap.bundle.min.js"></script> <script src="https://cdn.waset.org/static/js/site.js?v=150220211556"></script> <script> jQuery(document).ready(function() { /*jQuery.get("https://publications.waset.org/xhr/user-menu", function (response) { jQuery('#mainNavMenu').append(response); });*/ jQuery.get({ url: "https://publications.waset.org/xhr/user-menu", cache: false }).then(function(response){ jQuery('#mainNavMenu').append(response); }); }); </script> </body> </html>