Raymarching Visible Human in VR with Unity Engine (December 2021)

  • Published: 31 Dec 2024

Comments • 6

  • @s7887177
    @s7887177 1 year ago

    Hi Arthur, I am interested in this project too. I am more focused on the computational geometry side and have only a little knowledge about shaders. Could I get your project too? I would be very grateful~

  • @stanisawcetkowski5965
    @stanisawcetkowski5965 2 years ago

    Would you like to share the Unity project? I'm having a hard time trying to render a 3D texture on the Oculus Quest 2. I'm not a pro, just a student, and I would like to render a 3D texture from a sequence of 2D slices. Many thanks

    • @arthurchu2489
      @arthurchu2489  2 years ago

      Hi, sorry for getting back to you so late! I tried to upload the whole thing to GitHub, but I'm having trouble with that. The project also has a lot left to work on; we're planning to finish it by May, so I can get you a completed version then. Do you want the scripts, though? I can email you the script I'm using to generate the 3D texture from a series of PNGs, along with the raymarcher script that displays the texture on a cube. Reply with your email and I can send them your way. What are you trying to render in 3D?

    • @arthurchu2489
      @arthurchu2489  2 years ago

      In case you're still interested, here's the script I used to make the 3D texture. I don't know why I didn't think to just put the scripts here:
      This is an editor script, so it goes in the "Editor" folder; it adds a "CreateVisibleHumanTexture/Create" item to the menu bar at the top of the Unity Editor window that runs the script when clicked.
      using UnityEditor;
      using UnityEngine;
      using System.IO;
      using System;
      using System.Collections.Generic;

      public class ExampleEditorScript : MonoBehaviour
      {
          // Adds a "CreateVisibleHumanTexture/Create" item to the Unity editor menu bar.
          [MenuItem("CreateVisibleHumanTexture/Create")]
          static void CreateTexture3D()
          {
              //HEAD
              int numFiles = 800;
              string assetName = numFiles + "Slices";
              string path = "Assets/Human Slices/final images";
              // Texture2D sampleTex = new Texture2D(2,2);
              // sampleTex.LoadImage(File.ReadAllBytes("Assets/Human Slices/Head/Head (1).png"));
              int fileWidth = 512;  //1024;
              int fileHeight = 256; //512;

              // Collect and sort the slice images so they stack in order along z.
              DirectoryInfo dir = new DirectoryInfo(path);
              FileInfo[] info = dir.GetFiles("*.png");
              List<string> files = new List<string>();
              foreach (FileInfo f in info)
              {
                  files.Add(f.ToString());
              }
              files.Sort();

              Texture3D texture = new Texture3D(fileWidth, fileHeight, numFiles, TextureFormat.RGBA32, true);
              Color[] colors = new Color[fileWidth * fileHeight * numFiles];

              // The original comment was cut off from here on, so the rest of the method is a
              // reconstruction: load each slice and copy its pixels into its z-layer of the volume.
              for (int z = 0; z < numFiles; z++)
              {
                  Texture2D slice = new Texture2D(2, 2);
                  slice.LoadImage(File.ReadAllBytes(files[z]));
                  Array.Copy(slice.GetPixels(), 0, colors, z * fileWidth * fileHeight, fileWidth * fileHeight);
              }

              texture.SetPixels(colors);
              texture.Apply();
              AssetDatabase.CreateAsset(texture, "Assets/" + assetName + ".asset");
          }
      }
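
      Once it runs, the Texture3D asset shows up in the Project window and can be dragged onto the material, or hooked up in code; a minimal sketch (the asset path follows the reconstruction above, and the material path is just a placeholder):

      // Hypothetical usage: load the generated asset and assign it to the raymarching material.
      Texture3D volumeTex = AssetDatabase.LoadAssetAtPath<Texture3D>("Assets/800Slices.asset");
      Material volumeMat = AssetDatabase.LoadAssetAtPath<Material>("Assets/VolumeMaterial.mat");
      volumeMat.SetTexture("_MainTex", volumeTex);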

    • @arthurchu2489
      @arthurchu2489  2 years ago

      And then here's the shader used to display the texture. For each sample it takes along the ray, it does a distance check against each brush's position; if the sample is close enough to a brush, its color is set to transparent.
      Shader "Unlit/VolumeShader"
      {
      Properties
      {
      _MainTex ("Texture", 3D) = "white" {}
      _Alpha ("Alpha", float) = 0.02
      _StepSize ("Step Size", float) = 0.01
      }
      SubShader
      {
      Tags { "Queue" = "Transparent" "RenderType" = "Transparent" }
      Blend One OneMinusSrcAlpha
      LOD 100
      Pass
      {
      CGPROGRAM
      #pragma vertex vert
      #pragma fragment frag
      #include "UnityCG.cginc"
      // Maximum amount of raymarching samples
      #define MAX_STEP_COUNT 1024//512//256//128
      // Allowed floating point inaccuracy
      #define EPSILON 0.00001f
      struct appdata
      {
      float4 vertex : POSITION;
      };
      struct v2f
      {
      float4 vertex : SV_POSITION;
      float3 objectVertex : TEXCOORD0;
      float3 worldVertex : TEXCOORD1;
      float3 vectorToSurface : TEXCOORD2;
      //float3 ro : TEXCOORD3;
      };
      sampler3D _MainTex;
      float4 _MainTex_ST;
      float _Alpha;
      float _StepSize;
      uniform float _Brush1PositionX;
      uniform float _Brush1PositionY;
      uniform float _Brush1PositionZ;
      uniform float _Brush2PositionX;
      uniform float _Brush2PositionY;
      uniform float _Brush2PositionZ;
      uniform float _Brush3PositionX;
      uniform float _Brush3PositionY;
      uniform float _Brush3PositionZ;
      uniform float _Brush4PositionX;
      uniform float _Brush4PositionY;
      uniform float _Brush4PositionZ;
      v2f vert (appdata v)
      {
      v2f o;
      // Vertex in object space; this will be the starting point of raymarching
      o.objectVertex = v.vertex;
      // Calculate vector from camera to vertex in world space
      float3 worldVertex = mul(unity_ObjectToWorld, v.vertex).xyz;
      o.worldVertex = worldVertex;
      o.vectorToSurface = worldVertex - _WorldSpaceCameraPos;//world space
      o.vertex = UnityObjectToClipPos(v.vertex);
      return o;
      }
      float4 BlendUnder(float4 color, float4 newColor)
      {
      color.rgb += (1.0 - color.a) * newColor.a * newColor.rgb;
      color.a += (1.0 - color.a) * newColor.a;
      return color;
      }
      // Returns true when p lies inside an axis-aligned box of half-extent size
      // centered at b (i.e. |p - b| <= size on every axis).
      bool sdBox(float3 p, float3 b, float size)
      {
          float3 d = abs(p - b);
          return d.x <= size && d.y <= size && d.z <= size;
      }
      fixed4 frag(v2f i) : SV_Target
      {
      // Start raymarching at the front surface of the object
      float3 rayOrigin = i.objectVertex;
      // Use vector from camera to object surface to get ray direction
      float4 rayDirection = mul(unity_WorldToObject, float4(normalize(i.vectorToSurface), 1));
      float4 color = float4(0, 0, 0, 0);
      float3 samplePosition = rayOrigin;
      // Raymarch through object space
      for (int i = 0; i < MAX_STEP_COUNT; i++)
      {
      // Accumulate color only within unit cube bounds
      if(max(abs(samplePosition.x), max(abs(samplePosition.y), abs(samplePosition.z))) < 0.5f + EPSILON)
      {
      float4 sampledColor = tex3D(_MainTex, samplePosition + float3(0.5f, 0.5f, 0.5f));
      sampledColor.a *= _Alpha;

      // Brush positions arrive as world-space floats (note the Y/Z swizzle and sign flip);
      // bring them into the cube's object space before testing.
      float3 brush1 = mul(unity_WorldToObject, float4(_Brush1PositionX, _Brush1PositionZ, -_Brush1PositionY, 1)).xyz;
      float3 brush2 = mul(unity_WorldToObject, float4(_Brush2PositionX, _Brush2PositionZ, -_Brush2PositionY, 1)).xyz;
      float3 brush3 = mul(unity_WorldToObject, float4(_Brush3PositionX, _Brush3PositionZ, -_Brush3PositionY, 1)).xyz;
      float3 brush4 = mul(unity_WorldToObject, float4(_Brush4PositionX, _Brush4PositionZ, -_Brush4PositionY, 1)).xyz;

      // Erase (make transparent) any sample carved out by a brush. Part of this condition was
      // lost in the original comment, so the brush 1/2 thresholds are a reconstruction: brush 1
      // erases a small sphere unless brushes 2 or 4 cover that spot, and brush 3 erases a box
      // region except where brush 4's smaller box or brush 2's sphere protects it.
      if ((distance(samplePosition, brush1) < .05f
              && distance(samplePosition, brush2) > .05f
              && distance(samplePosition, brush4) > .05f)
          || (sdBox(samplePosition, brush3, 0.25f)
              && !sdBox(samplePosition, brush4, 0.1f)
              && distance(samplePosition, brush2) > .05f))
      {
          sampledColor.a = 0;
      }

      color = BlendUnder(color, sampledColor);
      samplePosition += rayDirection * _StepSize;

      }
      }
      return color;
      }
      ENDCG
      }
      }
      }
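
      The brush position floats declared at the top (for example _Brush1PositionX) are global shader uniforms, so nothing on the material needs to reference them; on the C# side they can be pushed every frame from the brush transforms. A minimal sketch of that idea (the class and field names are placeholders, not from the project):

      using UnityEngine;

      // Hypothetical helper: pushes one brush's world position into the shader's global uniforms.
      public class BrushUniformUpdater : MonoBehaviour
      {
          public Transform brush1; // assign the brush object in the Inspector

          void Update()
          {
              Vector3 p = brush1.position;
              Shader.SetGlobalFloat("_Brush1PositionX", p.x);
              Shader.SetGlobalFloat("_Brush1PositionY", p.y);
              Shader.SetGlobalFloat("_Brush1PositionZ", p.z);
          }
      }

      The Y/Z swizzle and sign flip happen inside the shader, so the script can send the raw world position.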

    • @DanyloSyrotynskyy
      @DanyloSyrotynskyy 2 years ago

      @arthurchu2489 Is this method suitable for calculating the ray origin and ray direction for the camera? Here is my script:
      Shader "Dansyrotyn/Raymarcher_World"
      {
      Properties
      {
      _MainTex ("Texture", 2D) = "white" {}
      }
      SubShader
      {
      // No culling or depth
      Cull Off ZWrite Off ZTest Always
      Pass
      {
      CGPROGRAM
      #pragma vertex vert
      #pragma fragment frag
      #pragma target 3.0
      #include "DistanceFunctions.cginc"
      #include "UnityCG.cginc"
      uniform sampler2D _CameraDepthTexture;
      uniform float4x4 _CamFrustum, _CamToWorld;
      uniform float _MaxDistance;
      uniform float4 _Sphere1;
      uniform float4 _Box1;
      uniform float3 _ModInterval;
      uniform float3 _LightDir;
      uniform fixed4 _MainColor;
      struct appdata
      {
      float4 vertex : POSITION;
      float2 uv : TEXCOORD0;
      UNITY_VERTEX_INPUT_INSTANCE_ID //Insert
      };
      struct v2f
      {
      float2 uv : TEXCOORD0;
      float4 vertex : SV_POSITION;
      float3 ray : TEXCOORD1;
      UNITY_VERTEX_OUTPUT_STEREO //Insert
      };
      v2f vert (appdata v)
      {
      v2f o;
      UNITY_SETUP_INSTANCE_ID(v); //Insert
      UNITY_INITIALIZE_OUTPUT(v2f, o); //Insert
      UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o); //Insert
      half index = v.vertex.z;
      v.vertex.z = 0;
      o.vertex = UnityObjectToClipPos(v.vertex);
      o.uv = v.uv;
      o.ray = _CamFrustum[(int)index].xyz;
      o.ray /= abs(o.ray.z);
      o.ray = mul(_CamToWorld, o.ray);
      return o;
      }
      float BoxSphere(float3 position)
      {
      float modX = pMod1 (position.x, _ModInterval.x);
      float modY = pMod1 (position.y, _ModInterval.y);
      float modZ = pMod1 (position.z, _ModInterval.z);
      float sphere1 = sdSphere(position - _Sphere1.xyz, _Sphere1.w);
      float box1 = sdRoundBox(position - _Box1.xyz, _Box1.www, 1);
      float combine1 = opSS(sphere1, box1, 1);

      float sphere2 = sdSphere(position - _Sphere1.xyz, 4.5);
      float combine2 = opIS(sphere2, combine1, 1.3);
      return combine2;
      }
      float BoxSphere1(float3 p)
      {
      float modX = pMod1 (p.x, _ModInterval.x);
      float modY = pMod1 (p.y, _ModInterval.y);
      float modZ = pMod1 (p.z, _ModInterval.z);
      float Sphere1 = sdSphere(p - _Sphere1.xyz, _Sphere1.w);
      float Box1 = sdBox(p - _Box1.xyz, _Box1.www);
      return opIS(Sphere1, Box1, 1);
      }
      float distanceField (float3 p) {
      //return BoxSphere(p);
      float modX = pMod1 (p.x, _ModInterval.x);
      float modY = pMod1 (p.y, _ModInterval.y);
      float modZ = pMod1 (p.z, _ModInterval.z);
      float Sphere1 = sdSphere(p - _Sphere1.xyz, _Sphere1.w);
      float Box1 = sdBox(p - _Box1.xyz, _Box1.www);
      return opS(Sphere1, Box1);
      }
      float3 getNormal(float3 p) {
      const float2 offset = float2(0.001, 0.0);
      float3 n = float3 (
      distanceField(p + offset.xyy) - distanceField(p - offset.xyy),
      distanceField(p + offset.yxy) - distanceField(p - offset.yxy),
      distanceField(p + offset.yyx) - distanceField(p - offset.yyx)
      );
      return normalize(n);
      }
      fixed4 raymarching(float3 ro, float3 rd, float depth) {
      fixed4 result;
      result = fixed4(1, 1, 1, 0);
      const int max_iteration = 264;
      float t = 0;
      for (int i = 0; i < max_iteration; i++) {
      if (t > _MaxDistance || t > depth)
      {
      result = fixed4(rd, 0);
      break;
      }
      float3 p = ro + rd * t;
      float d = distanceField (p);
      if (d < 0.01) {
      float3 n = getNormal(p);
      float light = dot(-_LightDir, n) * .75 + .25;
      result = fixed4(_MainColor.rgb * light,1);
      break;
      }
      t += d;
      }
      return result;
      }
      UNITY_DECLARE_SCREENSPACE_TEXTURE(_MainTex); //Insert
      fixed4 frag (v2f i) : SV_Target
      {
      float depth = LinearEyeDepth(tex2D(_CameraDepthTexture, i.uv).r);
      depth *= length(i.ray);
      UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(i); //Insert
      fixed4 col = UNITY_SAMPLE_SCREENSPACE_TEXTURE(_MainTex, i.uv); //Insert
      float3 rayDirection = normalize(i.ray.xyz);
      float3 rayOrigin = _WorldSpaceCameraPos;
      fixed4 result;
      result = raymarching(rayOrigin, rayDirection, depth);
      return fixed4(col * (1.0 - result.w) + result.xyz * result.w, 1.0);
      }
      ENDCG
      }
      }
      }
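
      For what it's worth, a shader written this way usually gets _CamFrustum and _CamToWorld from a script on the camera that also draws the full-screen quad with the frustum-corner index stored in each vertex's z (that is what the "half index = v.vertex.z" line reads back). A minimal sketch of that setup, with every name assumed rather than taken from this thread:

      using UnityEngine;

      // Hypothetical companion script: builds the frustum-corner matrix the vertex shader indexes
      // with v.vertex.z and supplies the camera-to-world matrix used to turn corners into rays.
      [RequireComponent(typeof(Camera))]
      public class RaymarchCameraSetup : MonoBehaviour
      {
          public Material raymarchMaterial;

          void OnRenderImage(RenderTexture source, RenderTexture destination)
          {
              if (raymarchMaterial == null) { Graphics.Blit(source, destination); return; }

              Camera cam = GetComponent<Camera>();
              cam.depthTextureMode |= DepthTextureMode.Depth; // the shader samples _CameraDepthTexture
              raymarchMaterial.SetMatrix("_CamFrustum", CamFrustum(cam));
              raymarchMaterial.SetMatrix("_CamToWorld", cam.cameraToWorldMatrix);
              CustomBlit(source, destination, raymarchMaterial);
          }

          // One row per frustum-corner direction in camera space, in the order the quad below
          // indexes them: 0 = top-left, 1 = top-right, 2 = bottom-right, 3 = bottom-left.
          static Matrix4x4 CamFrustum(Camera cam)
          {
              Matrix4x4 frustum = Matrix4x4.identity;
              float fov = Mathf.Tan(cam.fieldOfView * 0.5f * Mathf.Deg2Rad);
              Vector3 up = Vector3.up * fov;
              Vector3 right = Vector3.right * fov * cam.aspect;
              frustum.SetRow(0, -Vector3.forward - right + up); // top-left
              frustum.SetRow(1, -Vector3.forward + right + up); // top-right
              frustum.SetRow(2, -Vector3.forward + right - up); // bottom-right
              frustum.SetRow(3, -Vector3.forward - right - up); // bottom-left
              return frustum;
          }

          // Draws the quad by hand so each corner's z holds its row index in _CamFrustum.
          static void CustomBlit(RenderTexture source, RenderTexture dest, Material mat)
          {
              RenderTexture.active = dest;
              mat.SetTexture("_MainTex", source);
              GL.PushMatrix();
              GL.LoadOrtho();
              mat.SetPass(0);
              GL.Begin(GL.QUADS);
              GL.MultiTexCoord2(0, 0f, 0f); GL.Vertex3(0f, 0f, 3f); // bottom-left
              GL.MultiTexCoord2(0, 1f, 0f); GL.Vertex3(1f, 0f, 2f); // bottom-right
              GL.MultiTexCoord2(0, 1f, 1f); GL.Vertex3(1f, 1f, 1f); // top-right
              GL.MultiTexCoord2(0, 0f, 1f); GL.Vertex3(0f, 1f, 0f); // top-left
              GL.End();
              GL.PopMatrix();
          }
      }

      With that in place, rayOrigin = _WorldSpaceCameraPos and the interpolated i.ray give a per-pixel world-space origin and direction, so the approach in the frag function should be sound.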