Thursday, December 27, 2018

WebAR test

Ref: https://aframe.io/blog/arjs/
asp.net core sdk 2.1 @ windows 10 x64
1. dotnet new web -o wk15b











2. Program.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;

namespace wk15b
{
    public class Program
    {
        public static void Main(string[] args)
        {
            CreateWebHostBuilder(args).Build().Run();
        }

        public static IWebHostBuilder CreateWebHostBuilder(string[] args) =>
            WebHost.CreateDefaultBuilder(args)
                .UseUrls("http://192.168.1.108:5000/", "https://192.168.1.108:5001/")
                .UseStartup<Startup>();
    }
}
3. Startup.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;

namespace wk15b
{
    public class Startup
    {
        // This method gets called by the runtime. Use this method to add services to the container.
        // For more information on how to configure your application, visit https://go.microsoft.com/fwlink/?LinkID=398940
        public void ConfigureServices(IServiceCollection services)
        {
            services.AddMvc(); // Add
        }

        // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
        public void Configure(IApplicationBuilder app, IHostingEnvironment env)
        {
            if (env.IsDevelopment())
            {
                app.UseDeveloperExceptionPage();
            }
            else
            {
                app.UseExceptionHandler("/Error");
                app.UseHsts();
            }

            app.UseHttpsRedirection();
            app.UseStaticFiles();
            app.UseCookiePolicy();
            app.UseMvc();
        }
    }
}
4. wk1502.html
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8">
    <script src="https://aframe.io/releases/0.8.0/aframe.min.js"></script>
    <script src="https://jeromeetienne.github.io/AR.js/aframe/build/aframe-ar.js"></script>
</head>
<body style='margin : 0px; overflow: hidden;'>
    <a-scene embedded arjs='sourceType: webcam;'>
        

        <!-- handle marker with hiro preset -->
        <a-marker preset='hiro'>
            <a-box position='0 2 -1' material='color: green;'></a-box>
        </a-marker>

      
    </a-scene>
</body>
</html>
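Note: wk1502.html is assumed to be placed under wwwroot/ so that UseStaticFiles from step 3 serves it (e.g. https://192.168.1.108:5001/wk1502.html). Optionally, the site root can be mapped to the page; a minimal sketch for Startup.Configure (not part of the original steps):
// Before app.UseStaticFiles():
var defaultFiles = new DefaultFilesOptions();
defaultFiles.DefaultFileNames.Clear();
defaultFiles.DefaultFileNames.Add("wk1502.html");   // serve wk1502.html for requests to "/"
app.UseDefaultFiles(defaultFiles);
app.UseStaticFiles();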
5. Results


Tuesday, December 25, 2018

ML.NET Taxi Fare Prediction test

Ref: https://github.com/dotnet/machinelearning-samples/tree/master/samples/csharp/getting-started/Regression_TaxiFarePrediction

Visual Studio 2017 @ Windows 10 x64

1. New console project
2. nuget add ML.NET 0.8.0
3. nuget add PLplot
4. Merge the sample into a single file (see the data-class sketch after this list):

5. Download the data files into a Data folder one level up:
taxi-fare-test.csv
taxi-fare-train.csv
6. Create an MLModels folder one level up.
7. Result
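The data classes behind steps 4-6 are not shown above; a minimal sketch, assuming the column order of taxi-fare-train.csv and the [Column] attribute style used by ML.NET 0.x (the same style appears in the iris example later in this blog):
public class TaxiTrip
{
    [Column("0")] public string VendorId;
    [Column("1")] public string RateCode;
    [Column("2")] public float PassengerCount;
    [Column("3")] public float TripTime;
    [Column("4")] public float TripDistance;
    [Column("5")] public string PaymentType;
    [Column("6")] public float FareAmount;   // the label: the value the model learns to predict
}

public class TaxiTripFarePrediction
{
    [ColumnName("Score")] public float FareAmount;   // regression output column
}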

Wednesday, December 19, 2018

ML.NET by Desktop Console

1. Based on the ML.NET samples, which target a .NET Core SDK console app; changed to a desktop (.NET Framework) console app.
2. Errors













2-1 Any CPU ==> x64 or x86; select x64
2-2 Build / Advanced ==> C# 7.1
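A likely reason for 2-2 (an assumption; it depends on which sample was copied): the ML.NET console samples of this period use an async entry point, which the compiler only accepts from C# 7.1 on:
using System;
using System.Threading.Tasks;

class Program
{
    // async Main is only accepted from C# 7.1 on (Build / Advanced ==> C# 7.1)
    static async Task Main(string[] args)
    {
        // Placeholder for the sample's async training/prediction calls
        await Task.Run(() => Console.WriteLine("training..."));
    }
}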


Tuesday, December 11, 2018

Emgu FacialMouseControl by Kinect CAM

Ref: https://github.com/emgucv/emgucv/tree/master/Emgu.CV.Example/FacialMouseControl
Windows 10 x64 @ ASUS X450J + Visual Studio 2017 +  Emgu 3.2.0.2721 + Kinect 1.8 SDK
1. MainWindow.xaml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;

namespace wk1302
{
    // 1. nuget emgu 3.4.3
    // 2. Add using ref FaceMouse
    using Emgu.CV;
    using Emgu.CV.Structure;
    using Emgu.Util;
    using System.Threading;
    using System.Runtime.InteropServices;
    using System.Windows.Interop;
    //-- Kinect cam
    using Microsoft.Kinect;

    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        private VideoCapture _capture;
        // https://stackoverflow.com/questions/46410342/c-sharp-emgu-could-not-be-found-capture-and-haarcascade
        //private HaarCascade _face;
        private CascadeClassifier _face;
        private KinectSensor sensor;
        /// <summary>
        /// Bitmap that will hold color information
        /// </summary>
        private WriteableBitmap colorBitmap;
        /// <summary>
        /// Intermediate storage for the color data received from the camera
        /// </summary>
        private byte[] colorPixels;
        public MainWindow()
        {
            InitializeComponent();
            _face = new CascadeClassifier("haarcascade_frontalface_alt2.xml");
            sensor = KinectSensor.KinectSensors[0];
            this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
            // Allocate space to put the pixels we'll receive
            this.colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength];
            // This is the bitmap we'll display on-screen
            this.colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
            // Set the image we display to point to the bitmap where we'll put the image data
            this.image1.Source = this.colorBitmap;
            // Add an event handler to be called whenever there is new color frame data
            this.sensor.ColorFrameReady += this.SensorColorFrameReady;
            // Start the sensor!
           this.sensor.Start();
            // https://stackoverflow.com/questions/1111615/getting-inactivity-idle-time-in-a-wpf-application
            ComponentDispatcher.ThreadIdle += ComponentDispatcher_ThreadIdle;
        }
        private void SensorColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    // Copy the pixel data from the image to a temporary array
                    colorFrame.CopyPixelDataTo(this.colorPixels);

                    // Write the pixel data into our bitmap
                    this.colorBitmap.WritePixels(
                        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                        this.colorPixels,
                        this.colorBitmap.PixelWidth * sizeof(int),
                        0);
                }
               
            }

        }
        int count = 0;
        private void ComponentDispatcher_ThreadIdle(object sender, EventArgs e)
        {
            this.Title = (count++).ToString();
            using (var imageFrame = (BitmapSourceConvert.ToMat(colorBitmap)).ToImage<Bgr, byte>())
            {
                if (imageFrame != null)
                {
                    var grayframe = imageFrame.Convert<Gray, byte>();
                    var vfaces = _face.DetectMultiScale(grayframe, 1.1, 10, System.Drawing.Size.Empty); //the actual face detection happens here
                    if (vfaces.Length > 0)
                    {
                        System.Drawing.Rectangle Maxface = vfaces[0];
                        int maxw = vfaces[0].Width;
                        int maxh = vfaces[0].Height;
                        for (int i = 1; i < vfaces.Length; i++)
                        {
                            if (vfaces[i].Width * vfaces[i].Height > maxw * maxh)
                            {
                                Maxface = vfaces[i];
                                maxw = vfaces[i].Width;
                                maxh = vfaces[i].Height;
                            }
                        }

                        imageFrame.Draw(Maxface, new Bgr(System.Drawing.Color.BurlyWood), 3); //the detected face(s) is highlighted here using a box that is drawn around it/them

                        //---
                        System.Drawing.Point biggestFaceCenter = new System.Drawing.Point(Maxface.X + Maxface.Width / 2, Maxface.Y + Maxface.Height / 2);
                        //Point imageAreaCenter = new Point(imageArea.X + imageArea.Width / 2, imageArea.Y + imageArea.Height / 2);
                        //draw a green cross at the center of the biggest face
                        imageFrame.Draw(
                            new Cross2DF(biggestFaceCenter, Maxface.Width * 0.1f, Maxface.Height * 0.1f),
                            new Bgr(0, 255, 0), 1);

                    }
                }
                image1.Source = BitmapSourceConvert.ToBitmapSource(imageFrame);
            }

        }

        // Add ref: System.Drawing.dll
        public static class BitmapSourceConvert
        {
            [DllImport("gdi32")]
            private static extern int DeleteObject(IntPtr o);

            public static BitmapSource ToBitmapSource(IImage image)
            {
                using (System.Drawing.Bitmap source = image.Bitmap)
                {
                    IntPtr ptr = source.GetHbitmap();

                    BitmapSource bs = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
                        ptr,
                        IntPtr.Zero,
                        Int32Rect.Empty,
                        System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());

                    DeleteObject(ptr);
                    return bs;
                }
            }

            //ref: https://stackoverflow.com/questions/16596915/emgu-with-c-sharp-wpf/16597958
            public static Mat ToMat(BitmapSource source)
            {

                if (source.Format == PixelFormats.Bgr32) // .Bgra32)
                {
                    Mat result = new Mat();
                    result.Create(source.PixelHeight, source.PixelWidth, Emgu.CV.CvEnum.DepthType.Cv8U, 4);
                    source.CopyPixels(Int32Rect.Empty, result.DataPointer, result.Step * result.Rows, result.Step);
                    return result;
                }
                else if (source.Format == PixelFormats.Bgr24)
                {
                    Mat result = new Mat();
                    result.Create(source.PixelHeight, source.PixelWidth, Emgu.CV.CvEnum.DepthType.Cv8U, 3);
                    source.CopyPixels(Int32Rect.Empty, result.DataPointer, result.Step * result.Rows, result.Step);
                    return result;
                }
                else
                {
                    throw new Exception(String.Format("Conversion from BitmapSource of format {0} is not supported.", source.Format));
                }
            }
        }
    }
}
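The sample never stops the Kinect sensor when the window closes; a small cleanup sketch, assuming `this.Closing += MainWindow_Closing;` is added in the constructor:
private void MainWindow_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    // Stop processing frames and release the sensor
    ComponentDispatcher.ThreadIdle -= ComponentDispatcher_ThreadIdle;
    if (this.sensor != null)
    {
        this.sensor.ColorFrameReady -= this.SensorColorFrameReady;
        this.sensor.Stop();
    }
}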
2. Result:

Emgu FacialMouseControl (WPF)

Ref: https://github.com/emgucv/emgucv/tree/master/Emgu.CV.Example/FacialMouseControl
Windows 10 x64 @ ASUS X450J + Visual Studio 2017 +  Emgu 3.2.0.2721
1. WPF webcam + Windows Forms FacialMouse
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;

namespace wk1302
{
    // 1. nuget emgu 3.4.3
    // 2. Add using ref FaceMouse
    using Emgu.CV;
    using Emgu.CV.Structure;
    using Emgu.Util;
    using System.Threading;
    using System.Runtime.InteropServices;
    using System.Windows.Interop;

    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        private VideoCapture _capture;
        // https://stackoverflow.com/questions/46410342/c-sharp-emgu-could-not-be-found-capture-and-haarcascade
        //private HaarCascade _face;
        private CascadeClassifier _face;
        public MainWindow()
        {
            InitializeComponent();
            _face = new CascadeClassifier("haarcascade_frontalface_alt2.xml");
            if (_capture == null)
            {
                try
                {
                    _capture = new VideoCapture();
                }
                catch (NullReferenceException excpt)
                {
                    MessageBox.Show(excpt.Message);
                    return;
                }
            }
            // https://stackoverflow.com/questions/1111615/getting-inactivity-idle-time-in-a-wpf-application
            ComponentDispatcher.ThreadIdle += ComponentDispatcher_ThreadIdle;
        }
        int count = 0;
        private void ComponentDispatcher_ThreadIdle(object sender, EventArgs e)
        {
            this.Title = (count++).ToString();
            using (var imageFrame = _capture.QueryFrame().ToImage<Bgr, Byte>())
            {
                if (imageFrame != null)
                {
                    var grayframe = imageFrame.Convert<Gray, byte>();
                    var vfaces = _face.DetectMultiScale(grayframe, 1.1, 10, System.Drawing.Size.Empty); //the actual face detection happens here
                    if (vfaces.Length > 0)
                    {
                        System.Drawing.Rectangle Maxface = vfaces[0];
                        int maxw = vfaces[0].Width;
                        int maxh = vfaces[0].Height;
                        for (int i = 1; i < vfaces.Length; i++)
                        {
                            if (vfaces[i].Width * vfaces[i].Height > maxw * maxh)
                            {
                                Maxface = vfaces[i];
                                maxw = vfaces[i].Width;
                                maxh = vfaces[i].Height;
                            }
                        }

                        imageFrame.Draw(Maxface, new Bgr(System.Drawing.Color.BurlyWood), 3); //the detected face(s) is highlighted here using a box that is drawn around it/them

                        //---
                        System.Drawing.Point biggestFaceCenter = new System.Drawing.Point(Maxface.X + Maxface.Width / 2, Maxface.Y + Maxface.Height / 2);
                        //Point imageAreaCenter = new Point(imageArea.X + imageArea.Width / 2, imageArea.Y + imageArea.Height / 2);
                        //draw a green cross at the center of the biggest face
                        imageFrame.Draw(
                            new Cross2DF(biggestFaceCenter, Maxface.Width * 0.1f, Maxface.Height * 0.1f),
                            new Bgr(0, 255, 0), 1);

                    }
                }
                image1.Source = BitmapSourceConvert.ToBitmapSource(imageFrame);
            }

        }

        // Add ref: System.Drawing.dll
        public static class BitmapSourceConvert
        {
            [DllImport("gdi32")]
            private static extern int DeleteObject(IntPtr o);

            public static BitmapSource ToBitmapSource(IImage image)
            {
                using (System.Drawing.Bitmap source = image.Bitmap)
                {
                    IntPtr ptr = source.GetHbitmap();

                    BitmapSource bs = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
                        ptr,
                        IntPtr.Zero,
                        Int32Rect.Empty,
                        System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());

                    DeleteObject(ptr);
                    return bs;
                }
            }
        }
    }
}
3. Results


Emgu webcam capture (WPF)

Ref: https://github.com/emgucv/emgucv/tree/master/Emgu.CV.Example/FacialMouseControl
Windows 10 x64 @ ASUS X450J + Visual Studio 2017 +  Emgu 3.2.0.2721
1. Application.Idle (Form) ==> ComponentDispatcher.ThreadIdle (WPF)
2. MainWindow.xaml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
// Ref: https://stackoverflow.com/questions/16596915/emgu-with-c-sharp-wpf
namespace wk1301
{
    // 1. nuget emgu 3.4.3
    // 2. Add using ref FaceMouse
    using Emgu.CV;
    using Emgu.CV.Structure;
    using Emgu.Util;
    using System.Threading;
    using System.Runtime.InteropServices;
    using System.Windows.Interop;

    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        private VideoCapture _capture;
        //private HaarCascade _face;
        public MainWindow()
        {
            InitializeComponent();
            if (_capture == null)
            {
                try
                {
                    _capture = new VideoCapture();
                }
                catch (NullReferenceException excpt)
                {
                    MessageBox.Show(excpt.Message);
                    return;
                }
            }

            // https://stackoverflow.com/questions/1111615/getting-inactivity-idle-time-in-a-wpf-application
            ComponentDispatcher.ThreadIdle += ComponentDispatcher_ThreadIdle;
        }

        int count = 0;
        private void ComponentDispatcher_ThreadIdle(object sender, EventArgs e)
        {
            this.Title = (count++).ToString();
            using (var imageFrame = _capture.QueryFrame().ToImage<Bgr, Byte>())
            {
                if (imageFrame != null)
                {
                    image1.Source = BitmapSourceConvert.ToBitmapSource(imageFrame);
                }
            }
        }

    }

    // Add ref: System.Drawing.dll
    public static class BitmapSourceConvert
    {
        [DllImport("gdi32")]
        private static extern int DeleteObject(IntPtr o);

        public static BitmapSource ToBitmapSource(IImage image)
        {
            using (System.Drawing.Bitmap source = image.Bitmap)
            {
                IntPtr ptr = source.GetHbitmap();

                BitmapSource bs = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
                    ptr,
                    IntPtr.Zero,
                    Int32Rect.Empty,
                    System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());

                DeleteObject(ptr);
                return bs;
            }
        }
    }
}

3. Result:

Emgu FacialMouseControl (3.2.0.2721) - I

Ref: https://github.com/emgucv/emgucv/tree/master/Emgu.CV.Example/FacialMouseControl
Windows 10 x64 @ ASUS X450J + Visual Studio 2017 +  Emgu 3.2.0.2721
1. Version updates
      private Capture _capture;
      private HaarCascade _face;
==>
// https://stackoverflow.com/questions/46410342/c-sharp-emgu-could-not-be-found-capture-and-haarcascade   

private VideoCapture _capture;
private CascadeClassifier _face;

2. Form1.cs changes
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;

namespace wk1502
{
    using Emgu.CV;
    using Emgu.CV.Structure;
    using Emgu.Util;
    using System.Threading;
    using System.Runtime.InteropServices;

    public partial class Form1 : Form
    {
        private VideoCapture _capture;
        // https://stackoverflow.com/questions/46410342/c-sharp-emgu-could-not-be-found-capture-and-haarcascade
        //private HaarCascade _face;
        private CascadeClassifier _face;

        public Form1()
        {
            InitializeComponent();

            //Read the HaarCascade object
            _face = new CascadeClassifier("haarcascade_frontalface_alt2.xml");

            if (_capture == null)
            {
                try
                {
                    _capture = new VideoCapture();
                }
                catch (NullReferenceException excpt)
                {
                    MessageBox.Show(excpt.Message);
                    return;
                }
            }

            Application.Idle += ProcessImage;
        }

        public void ProcessImage(object sender, EventArgs e)
        {
            using (var imageFrame = _capture.QueryFrame().ToImage<Bgr, Byte>())
            {
                if (imageFrame != null)
                {
                    var grayframe = imageFrame.Convert<Gray, byte>();
                    var vfaces = _face.DetectMultiScale(grayframe, 1.1, 10, Size.Empty); //the actual face detection happens here
                    if (vfaces.Length > 0)
                    {
                        Rectangle Maxface = vfaces[0];
                        int maxw = vfaces[0].Width;
                        int maxh = vfaces[0].Height;
                        for (int i = 1; i < vfaces.Length; i++)
                        {
                            if (vfaces[i].Width * vfaces[i].Height > maxw * maxh)
                            {
                                Maxface = vfaces[i];
                                maxw = vfaces[i].Width;
                                maxh = vfaces[i].Height;
                            }
                        }
                        imageFrame.Draw(Maxface, new Bgr(Color.BurlyWood), 3); //the detected face(s) is highlighted here using a box that is drawn around it/them
                        Point biggestFaceCenter = new Point(Maxface.X + Maxface.Width / 2, Maxface.Y + Maxface.Height / 2);
                        //draw a green cross at the center of the biggest face
                        imageFrame.Draw(
                            new Cross2DF(biggestFaceCenter, Maxface.Width * 0.1f, Maxface.Height * 0.1f),
                            new Bgr(0, 255, 0), 1);
                        this.Text = string.Format("pt:{0},{1}, a:{2}", biggestFaceCenter.X, biggestFaceCenter.Y, Maxface.Width*Maxface.Height);
                    }
                }
                imageBox1.Image = imageFrame;
            }
        }

        [DllImport("user32.dll")]
        private static extern bool GetCursorPos(out System.Drawing.Point lpPoint);

        [DllImport("user32.dll")]
        private static extern bool SetCursorPos(int X, int Y);

        public void ReleaseData()
        {
            if (_capture != null)
                _capture.Dispose();
        }

        private void flipHorizontalButton_Click(object sender, EventArgs e)
        {
            if (_capture != null) _capture.FlipHorizontal = !_capture.FlipHorizontal;
        }
    }
}
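GetCursorPos / SetCursorPos are imported above but never used; a hedged sketch of how the detected face center could drive the mouse (the screen mapping below is an assumption, not part of the original sample). It would be called from ProcessImage with biggestFaceCenter and the frame size:
// Map a point in the camera frame (e.g. 640x480) to the primary screen and move the cursor.
private void MoveCursorToFace(Point faceCenter, int frameWidth, int frameHeight)
{
    Rectangle screen = Screen.PrimaryScreen.Bounds;        // System.Windows.Forms
    int x = faceCenter.X * screen.Width / frameWidth;
    int y = faceCenter.Y * screen.Height / frameHeight;
    SetCursorPos(x, y);                                    // user32.dll import declared above
}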
3. Result

Sunday, December 2, 2018

A-Frame tags must not be closed with />

1. A-Frame entity tags behave differently from plain HTML void elements here.














2. Comparing the two forms
Normal (explicit closing tags):
<a-scene>
        <a-box position="-2 0.5 -3" rotation="0 45 0" color="red"></a-box>
        <a-box position="0 0.5 -3" rotation="0 45 0" color="green"></a-box>
        <a-box position="2 0.5 -3" rotation="0 45 0" color="blue"></a-box>
</a-scene>
Abnormal (apparently the self-closed entities are parsed as nested children, so each box ends up in the previous box's coordinate system):
<a-scene>
        <a-box position="-2 0.5 -3" rotation="0 45 0" color="red"/>
        <a-box position="0 0.5 -3" rotation="0 45 0" color="green"/>
        <a-box position="2 0.5 -3" rotation="0 45 0" color="blue"/>
 </a-scene>

Monday, November 26, 2018

asp.net core https

Ref: https://docs.microsoft.com/zh-tw/aspnet/core/security/enforcing-ssl?view=aspnetcore-2.1&tabs=visual-studio

1. ASUS X450J @ Windows 10 x64
2. Visual Studio 2017 (dotnet --version 2.1.403)
3. IP: 192.168.1.108
//-------------
4. dotnet new web -o https1   // create server template
5. Program.cs
//  add .UseUrls("http://192.168.1.108:5000/", "https://192.168.1.108:5001/")
6. Startup.cs
public void ConfigureServices(IServiceCollection services)
        {
            services.AddMvc(); // Add
        }

        // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
        public void Configure(IApplicationBuilder app, IHostingEnvironment env)
        {
            /* ----------------------
                if (env.IsDevelopment())
                {
                    app.UseDeveloperExceptionPage();
                }

                app.Run(async (context) =>
                {
                    await context.Response.WriteAsync("Hello World!");
                });
                -------------------------------*/
            // Ref: https://docs.microsoft.com/zh-tw/aspnet/core/security/enforcing-ssl?view=aspnetcore-2.1&tabs=visual-studio
            if (env.IsDevelopment())
            {
                app.UseDeveloperExceptionPage();
            }
            else
            {
                app.UseExceptionHandler("/Error");
                app.UseHsts();
            }

            app.UseHttpsRedirection();
            app.UseStaticFiles();
            app.UseCookiePolicy();
            app.UseMvc();
        }
7. dotnet run // Testing
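Optional: since the HTTPS endpoint above uses the non-default port 5001, the redirect target can be pinned explicitly in ConfigureServices; a minimal sketch (not part of the original steps):
public void ConfigureServices(IServiceCollection services)
{
    services.AddMvc();
    // Make UseHttpsRedirection() redirect to port 5001 instead of the default 443
    services.AddHttpsRedirection(options => options.HttpsPort = 5001);
}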

Sunday, November 25, 2018

WPF Webcam (nuget WPF-MediaKit)

Ref:  https://github.com/Sascha-L/WPF-MediaKit/
1. ASUS X450J @ Windows 10 x64
2. Visual Studio 2017
3.  nuget add
3-1 Helixtoolkit.wpf
3-2 WPFMediaKit
4. new WPF Project
4-1 xaml
<Window x:Class="wk1108.MainWindow"
        xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
        xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
        xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
        xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
        xmlns:local="clr-namespace:wk1108"
        xmlns:med="clr-namespace:WPFMediaKit.DirectShow.Controls;assembly=WPFMediaKit"
        xmlns:h="http://helix-toolkit.org/wpf"
        mc:Ignorable="d"
        Title="MainWindow" Height="450" Width="800">
    <Grid>
        <h:HelixViewport3D>
            <h:DefaultLights/>
            <ModelVisual3D>
                <ModelVisual3D.Content>
                    <GeometryModel3D>
                        <GeometryModel3D.Geometry>
                            <MeshGeometry3D
              Positions="-1 -1 0  1 -1 0  -1 1 0  1 1 0"
              TextureCoordinates="1 0  0 0  1 1  0 1   "
              TriangleIndices="0 1 2  1 3 2" />
                        </GeometryModel3D.Geometry>
                        <GeometryModel3D.Material>
                            <DiffuseMaterial>
                                <DiffuseMaterial.Brush>
                                    <VisualBrush>
                                        <VisualBrush.Visual>
                                            <med:VideoCaptureElement Name="VC" EnableSampleGrabbing="True" />
                                        </VisualBrush.Visual>
                                    </VisualBrush>
                                </DiffuseMaterial.Brush>
                            </DiffuseMaterial>
                        </GeometryModel3D.Material>
                        <!-- Translate the plane. -->
                        <GeometryModel3D.Transform>
                            <TranslateTransform3D
            OffsetX="2" OffsetY="0" OffsetZ="-1"   >
                            </TranslateTransform3D>
                        </GeometryModel3D.Transform>
                    </GeometryModel3D>
                </ModelVisual3D.Content>
            </ModelVisual3D>
        </h:HelixViewport3D>
       
    </Grid>
</Window>
4-2 C# code (a guarded device-selection sketch follows after step 5)
 public partial class MainWindow : Window
    {
        public MainWindow()
        {
            InitializeComponent();
            this.Closing += MainWindow_Closing;
            VC.VideoCaptureDevice = WPFMediaKit.DirectShow.Controls.MultimediaUtil.VideoInputDevices[0];
        }

        private void MainWindow_Closing(object sender, System.ComponentModel.CancelEventArgs e)
        {
            VC.Stop();
        }
    }
4-3 Manually add DirectShowLib-2005.DLL from the package build folder
5. Result
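The 4-2 code indexes VideoInputDevices[0] unconditionally; a guarded device-selection sketch (assumes the same MainWindow):
// In the MainWindow constructor, instead of indexing [0] directly:
var devices = WPFMediaKit.DirectShow.Controls.MultimediaUtil.VideoInputDevices;
if (devices.Length == 0)
{
    MessageBox.Show("No video capture device found.");
}
else
{
    VC.VideoCaptureDevice = devices[0];   // devices[i].Name identifies each camera (DirectShowLib DsDevice)
}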





Saturday, November 17, 2018

Helixtoolkit Input Gesture

1. The original documentation at http://helixtoolkit.codeplex.com/wikipage?title=Input%20gestures&referringTitle=Documentation can no longer be found online.
2. A saved copy of the document, for reference.


Friday, November 9, 2018

C# plplot 1 - Simple plot

Ref: http://plplot.sourceforge.net/docbook-manual/plplot-html-5.13.0/simple-graph.html

VS 2017 @ Windows 10 x64 (ASUS X450J)

In order to draw such a simple graph, it is necessary to call at least four of the PLplot functions:
  1. plinit, to initialize PLplot.
  2. plenv, to define the range and scale of the graph, and draw labels, axes, etc.
  3. One or more calls to plline or plstring to draw lines or points as needed. Other more complex routines include plbin and plhist to draw histograms, and plerrx and plerry to draw error-bars.
  4. plend, to close the plot.
1-1 New console project
1-2 nuget add PLplot (using 5.13.4; latest is 5.13.7)
1-3 using PLplot;
1-4 example 0x1
1-5 error
System.BadImageFormatException: 'An attempt was made to load a program with an incorrect format. (Exception from HRESULT: 0x8007000B)'
1-6 Change Any CPU to x64
// Requires: using PLplot; using System.Diagnostics; (for PLStream and Process)
double[] x = new double[10], y = new double[10];
            for (int i=0; i<10; i++)
            {
                x[i] = (double)i;
                y[i] = Math.Pow(x[i], 2);
            }
            var pl = new PLStream();
            pl.sdev("pngcairo");
            pl.sfnam("pngex1.png");
            pl.spal0("cmap0_alternate.pal");
            pl.init();
            pl.env(0, 10, 0, 100, AxesScale.Independent, AxisBox.BoxTicksLabelsAxes);
            pl.schr(0, 1.25);
            pl.lab("X-axis", "Y-axis", "Title");
            pl.col0(9);
            pl.line(x, y);
            pl.col0(3);
            pl.sym(x, y, '0');
            pl.eop();
            pl.gver(out var verText);
            //Console.ReadLine();
            Console.WriteLine("Showing chart...");
            var p = new Process();
            string chartFileNamePath = @".\" + "pngex1.png";
            p.StartInfo = new ProcessStartInfo(chartFileNamePath)
            {
                UseShellExecute = true
            };
            p.Start();
           // Console.ReadLine();
1-7 The console-mode program must exit; otherwise a large black rectangle is left in the lower middle of the screen.
1-8 Axis labels cannot handle Chinese characters.

Thursday, November 8, 2018

A-Frame panorama with different textures on the inside and outside

Ref: https://stackoverflow.com/questions/39626036/how-do-i-texture-the-inside-of-a-cylinder-in-a-frame

1. A-Frame has no back-material attribute; only src sets the texture.
2. side="double" applies the same texture to both the inside and the outside.
3. The inside texture should be a mirrored image.
4. Apply the mirrored image to a slightly smaller model for the inside, as follows:

<a-assets>
    <img id="sky" src="grid.png">
    <img id="ins" src="grid鏡射圖.png">
</a-assets>
<a-sphere radius="2.99" side="double" src="#ins"></a-sphere>
<a-sphere radius="3" src="#sky"></a-sphere>

Tuesday, October 30, 2018

plplot 5.13.7 error: Unable to load DLL 'plplot'

1. ASUS X450J @ Windows 10 x64 VS2017
2. Ref: https://github.com/surban/PLplotNet/blob/master/Samples/CSharp/SineWaves/Program.cs
3. Ref: https://docs.microsoft.com/zh-tw/dotnet/machine-learning/tutorials/taxi-fare
            Console.WriteLine("Showing chart...");
            var p = new Process();
            string chartFileNamePath = @".\" + "SineWaves.png";
            p.StartInfo = new ProcessStartInfo(chartFileNamePath)
            {
                UseShellExecute = true
            };
            p.Start();
4. 5.13.7 error: Unable to load DLL 'plplot'
5. Change into 5.13.4
Any CPU
System.BadImageFormatException: 'An attempt was made to load a program with an incorrect format. (Exception from HRESULT: 0x8007000B)'
6. x64 OK



Tuesday, October 23, 2018

Failed to load dae file: the server responded with a status of 404 ()

1. asp.net core failed to load dae file and the server responded with a status of 404 ().
2. Add mime type as follows
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
        {
            //app.UseStaticFiles();
            // Requires: using Microsoft.AspNetCore.StaticFiles; using Microsoft.Extensions.FileProviders; using System.IO;
            var provider = new FileExtensionContentTypeProvider();
            // Add new mappings
            provider.Mappings[".dae"] = "application/octet-stream";  // "text /xml"; 
            app.UseStaticFiles(new StaticFileOptions
            {
                FileProvider = new PhysicalFileProvider(
            Path.Combine(Directory.GetCurrentDirectory(), "wwwroot")),
                RequestPath = "",
                ContentTypeProvider = provider
            });
        }
3. Once the .dae file has been read successfully, the mime-type mapping can be removed and the .dae file still loads.
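An alternative to mapping .dae explicitly is to serve all unknown file types with a default content type; a sketch (this serves every unrecognized extension, so use with care):
app.UseStaticFiles(new StaticFileOptions
{
    ServeUnknownFileTypes = true,
    DefaultContentType = "application/octet-stream"
});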

Wednesday, August 29, 2018

Edge Error Code: INET_E_RESOURCE_NOT_FOUND

Edge @ Windows 10 cannot access the webpage (IE and Chrome can).




Saturday, August 18, 2018

Clearing the Windows / Program Files directories on a second hard drive

Ref: https://superuser.com/questions/915173/delete-old-windows-program-files-from-second-drive

Windows 10 @  ASUS X450J
1. cmd @ administrator
2. takeown /F "D:\Program Files" /A /R /D Y
3. cacls "D:\Program Files" /T /grant administrators:F
4. rmdir /s /q "D:\Program Files"

Parts of the Users directory could not be deleted.



Tuesday, July 17, 2018

ML.NET tutorial 1-2-3, translation - 1

Google translation of the Readme
1-1
# Sentiment analysis for user reviews
In this introductory sample, you'll see how to use [ML.NET](https://www.microsoft.com/net/learn/apps/machine-learning-and-ai/ml-dotnet) to predict the sentiment (positive or negative) of customer reviews.
In the world of machine learning, this type of prediction is known as **binary classification**.
## Problem
This problem is centered around predicting whether a customer review has positive or negative sentiment. We will use IMDB (Internet Movie Database) and Yelp (crowd-sourced review platform) reviews that were processed by humans, and each review has been assigned a label:
* 0 - negative
* 1 - positive

Using these datasets, we will build a model that analyzes a string and predicts a sentiment value of 0 or 1.

## ML task - binary classification
The generalized problem of **binary classification** is to classify items into one of two classes (classifying items into more than two classes is called **multi-class classification**). For example:

* Predict whether an insurance claim is valid or not.
* Predict whether a plane will be delayed or will arrive on time.
* Predict whether a face ID (photo) belongs to the owner of a device.

The common feature of all these examples is that the parameter we want to predict can take only one of two values. In other words, the value is represented by a `boolean` type.

## Solution
To solve this problem, first we will build an ML model. Then we will train the model on existing data, evaluate how good it is, and finally we'll use the model to predict the sentiment of new reviews.

![Build -> Train -> Evaluate -> Consume](https://github.com/dotnet/machinelearning-samples/raw/master/samples/getting-started/shared_content/modelpipeline.png)

### 1. Build the model

Building the model includes: uploading the data (`sentiment-imdb-train.txt` with `TextLoader`), transforming the data so it can be used effectively by the ML algorithm (with `TextFeaturizer`), and choosing a learning algorithm (`FastTreeBinaryClassifier`). All of those steps are stored in a `LearningPipeline`:
C# comments and code:
```CSHARP
// LearningPipeline holds all the steps of the learning process: data, transforms, learners.
var pipeline = new LearningPipeline();
// The TextLoader loads a dataset. The schema of the dataset is specified by passing a class
// containing all the column names and their types.
pipeline.Add(new TextLoader(TrainDataPath).CreateFrom<SentimentData>());
// TextFeaturizer is a transform that is used to featurize the input column so the data is formatted and usable.
pipeline.Add(new TextFeaturizer("Features", "SentimentText"));
// FastTreeBinaryClassifier is an algorithm that will be used to train the model.
// It has three hyperparameters for tuning decision tree performance.
pipeline.Add(new FastTreeBinaryClassifier() { NumLeaves = 5, NumTrees = 5, MinDocumentsInLeafs = 2 });
```
### 2. Train the model
Training the model is the process of running the chosen algorithm on training data (with known sentiment values) to tune the parameters of the model. It is implemented in the `Train()` API. To perform training we just call the method, providing the types for our data object `SentimentData` and the prediction object `SentimentPrediction`.
```CSHARP
var model = pipeline.Train<SentimentData, SentimentPrediction>();
```
### 3. Evaluate the model
We need this step to conclude how accurate our model is on new data. To do so, the model from the previous step is run against another dataset that was not used in training (`sentiment-yelp-test.txt`). This dataset also contains known sentiments. `BinaryClassificationEvaluator` computes the difference between the known sentiment values and those predicted by the model, expressed as various metrics.
C# comments and code:
```CSHARP
var testData = new TextLoader(TestDataPath).CreateFrom<SentimentData>();

var evaluator = new BinaryClassificationEvaluator();
var metrics = evaluator.Evaluate(model, testData);
```
> *To learn more about how to understand the metrics, check out the machine learning glossary in the [ML.NET guide](https://docs.microsoft.com/en-us/dotnet/machine-learning/), or use any materials on data science and machine learning*.

If you are not satisfied with the quality of the model, there are a variety of ways to improve it, which are covered in the *examples* category.

> *Keep in mind that for this sample the quality is lower than it could be, because the size of the datasets was reduced for performance purposes. You can use larger labeled sentiment datasets available online to significantly improve the quality.*

### 4. Consume the model
After the model is trained, we can use the `Predict()` API to predict the sentiment of a new review.

C# comments and code:
```CSHARP
var predictions = model.Predict(TestSentimentData.Sentiments);
```
where `TestSentimentData.Sentiments` contains the new user reviews that we want to analyze.

C# comments and code:
```CSHARP
internal static readonly IEnumerable<SentimentData> Sentiments = new[]
{
    new SentimentData
    {
        SentimentText = "Contoso's 11 is a wonderful experience",
        Sentiment = 0
    },
    new SentimentData
    {
        SentimentText = "The acting in this movie is very bad",
        Sentiment = 0
    },
    new SentimentData
    {
        SentimentText = "Joe versus the Volcano Coffee Company is a great film.",
        Sentiment = 0
    }
};
```
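
The `SentimentData` / `SentimentPrediction` types referenced throughout are not part of this excerpt; a minimal sketch, assuming the review text comes before the label in the dataset files (the attribute style matches the iris example below):
```CSHARP
public class SentimentData
{
    [Column("0")] public string SentimentText;                     // the review text
    [Column("1")] [ColumnName("Label")] public float Sentiment;    // 0 = negative, 1 = positive
}

public class SentimentPrediction
{
    [ColumnName("PredictedLabel")] public bool Sentiment;
}
```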

ML.NET tutorials 1 2 3

ASUS X450J @ Windows 10 x64 + Visual Studio 2017
https://www.microsoft.com/net/learn/apps/machine-learning-and-ai/ml-dotnet/get-started/windows
1. Hello ML.NET
1-1 cmd
1-2 dotnet new console -o myApp
1-3 cd myApp
1-4 dotnet add package Microsoft.ML --version 0.3.0
1-6 Program.cs
using System;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Runtime.Api;
using Microsoft.ML.Trainers;
using Microsoft.ML.Transforms;

namespace mlex1
{
    class Program
    {
        // STEP 1: Define your data structures

        // IrisData is used to provide training data, and as
        // input for prediction operations
        // - First 4 properties are inputs/features used to predict the label
        // - Label is what you are predicting, and is only set when training
        public class IrisData
        {
            [Column("0")] public float SepalLength;
            [Column("1")] public float SepalWidth;
            [Column("2")] public float PetalLength;
            [Column("3")] public float PetalWidth;
            [Column("4")] [ColumnName("Label")] public string Label;
        }

        // IrisPrediction is the result returned from prediction operations
        public class IrisPrediction
        {
            [ColumnName("PredictedLabel")] public string PredictedLabels;
        }

        static void Main(string[] args)
        {
            // STEP 2: Create a pipeline and load your data
            var pipeline = new LearningPipeline();

            // If working in Visual Studio, make sure the 'Copy to Output Directory'
            // property of iris-data.txt is set to 'Copy always'
            string dataPath = "iris-data.txt";
            pipeline.Add(new TextLoader(dataPath).CreateFrom<IrisData>(separator: ','));

            // STEP 3: Transform your data
            // Assign numeric values to text in the "Label" column, because only
            // numbers can be processed during model training
            pipeline.Add(new Dictionarizer("Label"));

            // Puts all features into a vector
            pipeline.Add(new ColumnConcatenator("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth"));

            // STEP 4: Add learner
            // Add a learning algorithm to the pipeline.
            // This is a classification scenario (What type of iris is this?)
            pipeline.Add(new StochasticDualCoordinateAscentClassifier());

            // Convert the Label back into original text (after converting to number in step 3)
            pipeline.Add(new PredictedLabelColumnOriginalValueConverter() { PredictedLabelColumn = "PredictedLabel" });

            // STEP 5: Train your model based on the data set
            var model = pipeline.Train<IrisData, IrisPrediction>();

            // STEP 6: Use your model to make a prediction
            // You can change these numbers to test different predictions
            var prediction = model.Predict(new IrisData()
            {
                SepalLength = 3.3f,
                SepalWidth = 1.6f,
                PetalLength = 0.2f,
                PetalWidth = 5.1f,
            });

            Console.WriteLine($"Predicted flower type is: {prediction.PredictedLabels}");
        }
    }
}
1-7 Download iris-data.txt
1-8



Ref: https://docs.microsoft.com/zh-tw/dotnet/machine-learning/tutorials/sentiment-analysis
2. Sentiment analysis binary classification sample
https://github.com/dotnet/machinelearning-samples
2-1 download zip
2-2 unzip
2-3 copy datasets folder into project
2-4 run

3. Clustering_iris
3-1 Download (as previous case)
3-2 copy datasets folder into project
3-3 run

Wednesday, July 11, 2018

Javascript 3D + Audio - 1

Integrating p5.sound with a-frame.io
Ref: cdn p5.js and p5.sound.js
cdn: https://cdnjs.com/libraries/p5.js/
Ref: a-frame  https://aframe.io/docs/0.8.0/introduction/
1. p5.js example
1-1
    <script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.6.1/p5.js"></script>
1-2 Initialization
1-2-1 function setup(): environment setup
1-2-2 function draw(): periodically redraws the scene
1-3 p5.sound initialization
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.6.1/addons/p5.sound.min.js"></script>
1-3-1 function preload(): loads the mp3 file
1-3-2 works with p5.js setup() and draw() to keep the audio in sync
1-4 a-frame 3D structure
    <script src="https://aframe.io/releases/0.8.0/aframe.min.js"></script>
1-4-1 3D scene
Build the <a-scene> scene inside <body>.
1-4-2 draw() reads the amplitude level and updates the 3D attributes in sync

1-5 Example
<html>
<head>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.6.1/p5.js"></script>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.6.1/addons/p5.sound.min.js"></script>
    <script src="https://aframe.io/releases/0.8.0/aframe.min.js"></script>
    <script>
function preload(){
  sound = loadSound('assets/aa1.mp3');
}
function setup() {
  amplitude = new p5.Amplitude();

  // start / stop the sound when canvas is clicked
document.addEventListener("click", function(){
     if (sound.isPlaying() ){
          sound.stop();
        } else {
          sound.play();
        }
    });
}
function draw() {
  var level = amplitude.getLevel();
  var sceneEl = document.querySelector('a-scene');
  var sphere1 = sceneEl.querySelector('#sphere1');
  sphere1.setAttribute('radius', level * 20);
}
    </script>
</head>
<body>

    <a-scene>
      <a-box color="#4CC3D9" position="-1 0.5 -3" rotation="0 45 0"></a-box>
      <a-sphere color="#EF2D5E" id="sphere1" position="0 1.25 -5" radius="1.25"></a-sphere>
      <a-cylinder color="#FFC65D" height="1.5" position="1 0.75 -3" radius="0.5"></a-cylinder>
      <a-plane color="#7BC8A4" height="4" position="0 0 -4" rotation="-90 0 0" width="4"></a-plane>
      <a-sky color="#ECECEC"></a-sky>
    </a-scene>
</body>
</html>

Wednesday, June 27, 2018

HWU VR Project


1. Download html folder
2. Unzip html
3. cmd
4. cd E:\Course1062\project
5. ipconfig
6. dotnet new web -o html

7. Modify Program.cs
     .UseUrls("http://192.168.225.xxx:5000")
8. Modify Startup.cs
     app.UseStaticFiles();

9. Copy the unzipped html content into the html project folder

10. cd html
11 dotnet run
12. Chrome http://192.168.225.xxx:5000/s1.html

13. iphone http://192.168.225.xxx:5000/s1.html
14. Android http://192.168.225.xxx:5000/s1.html
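Steps 7 and 8 follow the same pattern as the wk15b project earlier in this blog; a sketch (xxx stays a placeholder for the address reported by ipconfig in step 5):
// Program.cs (step 7)
public static IWebHostBuilder CreateWebHostBuilder(string[] args) =>
    WebHost.CreateDefaultBuilder(args)
        .UseUrls("http://192.168.225.xxx:5000")   // replace xxx with the machine's IP
        .UseStartup<Startup>();

// Startup.cs (step 8), inside Configure:
app.UseStaticFiles();   // serves s1.html and its assets from wwwroot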

Saturday, June 23, 2018

Taipei city open data example (python)

Ref: https://youtu.be/sUzR3QVBKIo

ASUS X450J @ Windows 10 x64
Visual Studio 2017 (python)

data ID
http://data.taipei/opendata/datalist/datasetMeta/outboundDesc;jsessionid=D984042CBA68D2341286C1DA0E9DE9C1?id=15c3e1ae-899b-466c-a536-208497e3a369&rid=296acfa2-5d93-4706-ad58-e83cc951863c

Data retrieval format (http://data.taipei/opendata/developer)
Retrieving all records
You can retrieve the full contents of the dataset whose id is 35aa3c53-28fb-423c-91b6-2c22432d0d70 via:
http://data.taipei/opendata/datalist/apiAccess?scope=resourceAquire
&rid=35aa3c53-28fb-423c-91b6-2c22432d0d70


1. Code
import urllib.request as request
import json
src = "http://data.taipei/opendata/datalist/apiAccess?scope=resourceAquire&rid=296acfa2-5d93-4706-ad58-e83cc951863c"
with request.urlopen(src) as response:
    data = json.load(response)
# Get the list of company names
clist = data["result"]["results"]
with open("data.txt", "w", encoding="utf-8") as file:
  for company in clist:
    file.write(company["公司名稱"] + "\r\n")
2. Notes
Uses the urllib and json modules.
Taipei City open data (src).
First row of the data:
{"result":{"offset":0,"limit":10000,"count":5225,"sort":"","results":[{"_id":"1","統編":"04474768","公司名稱":"優鐠電子股份有限公司","公司地址":"114臺北市內湖區瑞光路258巷2號10樓","ADDR_X":"308051.826","ADDR_Y":"2774219.522"}
...
3. Results